Merge branch 'master' into built_in_floor_ceil

ychenfo 2021-12-02 01:08:55 +08:00
commit aa2d79fea6
13 changed files with 698 additions and 344 deletions


@@ -2,16 +2,16 @@
   "nodes": {
     "nixpkgs": {
       "locked": {
-        "lastModified": 1637636156,
-        "narHash": "sha256-E2ym4Vcpqu9JYoQDXJZR48gVD+LPPbaCoYveIk7Xu3Y=",
+        "lastModified": 1638279546,
+        "narHash": "sha256-1KCwN7twjp1dBdp0jPgVdYFztDkCR8+roo0B34J9oBY=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "b026e1cf87a108dd06fe521f224fdc72fd0b013d",
+        "rev": "96b4157790fc96e70d6e6c115e3f34bba7be490f",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "release-21.11",
+        "ref": "nixos-21.11",
         "repo": "nixpkgs",
         "type": "github"
       }


@@ -1,7 +1,7 @@
 {
   description = "The third-generation ARTIQ compiler";
 
-  inputs.nixpkgs.url = github:NixOS/nixpkgs/release-21.11;
+  inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-21.11;
 
   outputs = { self, nixpkgs }:
     let


@@ -9,13 +9,17 @@ import nac3artiq
 
 __all__ = ["KernelInvariant", "extern", "kernel", "portable", "nac3",
            "ms", "us", "ns",
            "print_int32", "print_int64",
-           "Core", "TTLOut", "parallel", "sequential"]
+           "Core", "TTLOut", "parallel", "sequential", "virtual"]
 
 T = TypeVar('T')
 
 class KernelInvariant(Generic[T]):
     pass
 
+# The virtual class must exist before nac3artiq.NAC3 is created.
+class virtual(Generic[T]):
+    pass
+
 import device_db
 core_arguments = device_db.device_db["core"]["arguments"]
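Note: with `virtual` exported above, kernel code can annotate an argument as a virtual (base-class) reference. A minimal sketch of the intended usage, assuming the min_artiq prelude; the class names and method bodies here are hypothetical:

    from min_artiq import *

    @nac3
    class Base:
        @kernel
        def run(self):
            pass

    @nac3
    class Demo:
        @kernel
        def dispatch(self, obj: virtual[Base]):
            # call through the virtual (base-class) reference
            obj.run()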


@@ -55,6 +55,10 @@ pub struct PrimitivePythonId
     bool: u64,
     list: u64,
     tuple: u64,
+    typevar: u64,
+    none: u64,
+    generic_alias: (u64, u64),
+    virtual_id: u64,
 }
 
 // TopLevelComposer is unsendable as it holds the unification table, which is
@@ -96,10 +100,13 @@ impl Nac3 {
             let val = id_fn.call1((member.get_item(1)?,))?.extract()?;
             name_to_pyid.insert(key.into(), val);
         }
+        let typings = PyModule::import(py, "typing")?;
         let helper = PythonHelper {
             id_fn: builtins.getattr("id").unwrap().to_object(py),
             len_fn: builtins.getattr("len").unwrap().to_object(py),
             type_fn: builtins.getattr("type").unwrap().to_object(py),
+            origin_ty_fn: typings.getattr("get_origin").unwrap().to_object(py),
+            args_ty_fn: typings.getattr("get_args").unwrap().to_object(py),
         };
         Ok((
             module.getattr("__name__")?.extract()?,
@@ -284,7 +291,42 @@ impl Nac3 {
         let builtins_mod = PyModule::import(py, "builtins").unwrap();
         let id_fn = builtins_mod.getattr("id").unwrap();
         let numpy_mod = PyModule::import(py, "numpy").unwrap();
+        let typing_mod = PyModule::import(py, "typing").unwrap();
+        let types_mod = PyModule::import(py, "types").unwrap();
         let primitive_ids = PrimitivePythonId {
+            virtual_id: id_fn
+                .call1((builtins_mod
+                    .getattr("globals")
+                    .unwrap()
+                    .call0()
+                    .unwrap()
+                    .get_item("virtual")
+                    .unwrap(),
+                )).unwrap()
+                .extract()
+                .unwrap(),
+            generic_alias: (
+                id_fn
+                    .call1((typing_mod.getattr("_GenericAlias").unwrap(),))
+                    .unwrap()
+                    .extract()
+                    .unwrap(),
+                id_fn
+                    .call1((types_mod.getattr("GenericAlias").unwrap(),))
+                    .unwrap()
+                    .extract()
+                    .unwrap(),
+            ),
+            none: id_fn
+                .call1((builtins_mod.getattr("None").unwrap(),))
+                .unwrap()
+                .extract()
+                .unwrap(),
+            typevar: id_fn
+                .call1((typing_mod.getattr("TypeVar").unwrap(),))
+                .unwrap()
+                .extract()
+                .unwrap(),
             int: id_fn
                 .call1((builtins_mod.getattr("int").unwrap(),))
                 .unwrap()
@@ -403,10 +445,13 @@ impl Nac3 {
         };
         let mut synthesized = parse_program(&synthesized).unwrap();
         let builtins = PyModule::import(py, "builtins")?;
+        let typings = PyModule::import(py, "typing")?;
         let helper = PythonHelper {
             id_fn: builtins.getattr("id").unwrap().to_object(py),
             len_fn: builtins.getattr("len").unwrap().to_object(py),
             type_fn: builtins.getattr("type").unwrap().to_object(py),
+            origin_ty_fn: typings.getattr("get_origin").unwrap().to_object(py),
+            args_ty_fn: typings.getattr("get_args").unwrap().to_object(py),
         };
         let resolver = Arc::new(Resolver(Arc::new(InnerResolver {
             id_to_type: self.builtins_ty.clone().into(),


@@ -43,6 +43,8 @@ pub struct PythonHelper
     pub type_fn: PyObject,
     pub len_fn: PyObject,
     pub id_fn: PyObject,
+    pub origin_ty_fn: PyObject,
+    pub args_ty_fn: PyObject,
 }
 
 struct PythonValue {
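Note: the two new helpers wrap `typing.get_origin` and `typing.get_args`, which split a parameterized annotation into its base type and its argument tuple. For reference, plain CPython (3.9+) behaviour that the resolver relies on:

    from typing import get_origin, get_args

    assert get_origin(list[int]) is list
    assert get_args(list[int]) == (int,)
    assert get_origin(tuple[int, bool]) is tuple
    assert get_args(tuple[int, bool]) == (int, bool)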
@@ -133,47 +135,46 @@ impl InnerResolver {
         }))
     }
 
-    fn get_obj_type(
+    // handle python objects that represent types themselves
+    // primitives and class types should be themselves, use `ty_id` to check,
+    // TypeVars and GenericAlias(`A[int, bool]`) should use `ty_ty_id` to check
+    // the `bool` value returned indicates whether they are instantiated or not
+    fn get_pyty_obj_type(
         &self,
         py: Python,
-        obj: &PyAny,
+        pyty: &PyAny,
         unifier: &mut Unifier,
         defs: &[Arc<RwLock<TopLevelDef>>],
         primitives: &PrimitiveStore,
-    ) -> PyResult<Option<Type>> {
+    ) -> PyResult<Result<(Type, bool), String>> {
         let ty_id: u64 = self
             .helper
             .id_fn
-            .call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?
+            .call1(py, (pyty,))?
+            .extract(py)?;
+        let ty_ty_id: u64 = self
+            .helper
+            .id_fn
+            .call1(py, (self.helper.type_fn.call1(py, (pyty,))?,))?
             .extract(py)?;
         if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
-            Ok(Some(primitives.int32))
+            Ok(Ok((primitives.int32, true)))
         } else if ty_id == self.primitive_ids.int64 {
-            Ok(Some(primitives.int64))
+            Ok(Ok((primitives.int64, true)))
         } else if ty_id == self.primitive_ids.bool {
-            Ok(Some(primitives.bool))
+            Ok(Ok((primitives.bool, true)))
         } else if ty_id == self.primitive_ids.float {
-            Ok(Some(primitives.float))
+            Ok(Ok((primitives.float, true)))
         } else if ty_id == self.primitive_ids.list {
-            let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
-            if len == 0 {
-                let var = unifier.get_fresh_var().0;
-                let list = unifier.add_ty(TypeEnum::TList { ty: var });
-                Ok(Some(list))
-            } else {
-                let ty = self.get_list_elem_type(py, obj, len, unifier, defs, primitives)?;
-                Ok(ty.map(|ty| unifier.add_ty(TypeEnum::TList { ty })))
-            }
+            // do not handle type var param and concrete check here
+            let var = unifier.get_fresh_var().0;
+            let list = unifier.add_ty(TypeEnum::TList { ty: var });
+            Ok(Ok((list, false)))
         } else if ty_id == self.primitive_ids.tuple {
-            let elements: &PyTuple = obj.cast_as()?;
-            let types: Result<Option<Vec<_>>, _> = elements
-                .iter()
-                .map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))
-                .collect();
-            let types = types?;
-            Ok(types.map(|types| unifier.add_ty(TypeEnum::TTuple { ty: types })))
-        } else if let Some(def_id) = self.pyid_to_def.read().get(&ty_id) {
+            // do not handle type var param and concrete check here
+            Ok(Ok((unifier.add_ty(TypeEnum::TTuple { ty: vec![] }), false)))
+        } else if let Some(def_id) = self.pyid_to_def.read().get(&ty_id).cloned() {
             let def = defs[def_id.0].read();
             if let TopLevelDef::Class {
                 object_id,
@@ -183,54 +184,274 @@ impl InnerResolver {
                 ..
             } = &*def
             {
-                let var_map: HashMap<_, _> = type_vars
-                    .iter()
-                    .map(|var| {
-                        (
-                            if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*var) {
-                                *id
-                            } else {
-                                unreachable!()
-                            },
-                            unifier.get_fresh_var().0,
-                        )
-                    })
-                    .collect();
-                let mut fields_ty = HashMap::new();
-                for method in methods.iter() {
-                    fields_ty.insert(method.0, (method.1, false));
-                }
-                for field in fields.iter() {
-                    let name: String = field.0.into();
-                    let field_data = obj.getattr(&name)?;
-                    let ty = self
-                        .get_obj_type(py, field_data, unifier, defs, primitives)?
-                        .unwrap_or(primitives.none);
-                    let field_ty = unifier.subst(field.1, &var_map).unwrap_or(field.1);
-                    if unifier.unify(ty, field_ty).is_err() {
-                        // field type mismatch
-                        return Ok(None);
-                    }
-                    fields_ty.insert(field.0, (ty, field.2));
-                }
-                for (_, ty) in var_map.iter() {
-                    // must be concrete type
-                    if !unifier.is_concrete(*ty, &[]) {
-                        return Ok(None);
-                    }
-                }
-                Ok(Some(unifier.add_ty(TypeEnum::TObj {
-                    obj_id: *object_id,
-                    fields: RefCell::new(fields_ty),
-                    params: RefCell::new(var_map),
-                })))
-            } else {
-                // only object is supported, functions are not supported
-                Ok(None)
-            }
-        } else {
-            Ok(None)
-        }
-    }
+                // do not handle type var param and concrete check here, and no subst
+                Ok(Ok({
+                    let ty = TypeEnum::TObj {
+                        obj_id: *object_id,
+                        params: RefCell::new({
+                            type_vars
+                                .iter()
+                                .map(|x| {
+                                    if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*x) {
+                                        (*id, *x)
+                                    } else {
+                                        unreachable!()
+                                    }
+                                }).collect()
+                        }),
+                        fields: RefCell::new({
+                            let mut res = methods
+                                .iter()
+                                .map(|(iden, ty, _)| (*iden, (*ty, false)))
+                                .collect::<HashMap<_, _>>();
+                            res.extend(fields.clone().into_iter().map(|x| (x.0, (x.1, x.2))));
+                            res
+                        })
+                    };
+                    // here also false, later instantiation use python object to check compatible
+                    (unifier.add_ty(ty), false)
+                }))
+            } else {
+                // only object is supported, functions are not supported
+                unreachable!("function type is not supported, should not be queried")
+            }
+        } else if ty_ty_id == self.primitive_ids.typevar {
+            let constraint_types = {
+                let constraints = pyty.getattr("__constraints__").unwrap();
+                let mut result: Vec<Type> = vec![];
+                for i in 0.. {
+                    if let Ok(constr) = constraints.get_item(i) {
+                        result.push({
+                            match self.get_pyty_obj_type(py, constr, unifier, defs, primitives)? {
+                                Ok((ty, _)) => {
+                                    if unifier.is_concrete(ty, &[]) {
+                                        ty
+                                    } else {
+                                        return Ok(Err(format!(
+                                            "the {}th constraint of TypeVar `{}` is not concrete",
+                                            i + 1,
+                                            pyty.getattr("__name__")?.extract::<String>()?
+                                        )))
+                                    }
+                                },
+                                Err(err) => return Ok(Err(err))
+                            }
+                        })
+                    } else {
+                        break;
+                    }
+                }
+                result
+            };
+            let res = unifier.get_fresh_var_with_range(&constraint_types).0;
+            Ok(Ok((res, true)))
+        } else if ty_ty_id == self.primitive_ids.generic_alias.0 || ty_ty_id == self.primitive_ids.generic_alias.1 {
+            let origin = self.helper.origin_ty_fn.call1(py, (pyty,))?;
+            let args = self.helper.args_ty_fn.call1(py, (pyty,))?;
+            let args: &PyTuple = args.cast_as(py)?;
+            let origin_ty = match self.get_pyty_obj_type(py, origin.as_ref(py), unifier, defs, primitives)? {
+                Ok((ty, false)) => ty,
+                Ok((_, true)) => return Ok(Err("instantiated type does not take type parameters".into())),
+                Err(err) => return Ok(Err(err))
+            };
+            match &*unifier.get_ty(origin_ty) {
+                TypeEnum::TList { .. } => {
+                    if args.len() == 1 {
+                        let ty = match self.get_pyty_obj_type(py, args.get_item(0), unifier, defs, primitives)? {
+                            Ok(ty) => ty,
+                            Err(err) => return Ok(Err(err))
+                        };
+                        if !unifier.is_concrete(ty.0, &[]) && !ty.1 {
+                            panic!("type list should take concrete parameters in type var ranges")
+                        }
+                        Ok(Ok((unifier.add_ty(TypeEnum::TList { ty: ty.0 }), true)))
+                    } else {
+                        return Ok(Err(format!("type list needs exactly 1 type parameters, found {}", args.len())))
+                    }
+                },
+                TypeEnum::TTuple { .. } => {
+                    let args = match args
+                        .iter()
+                        .map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
+                        .collect::<Result<Vec<_>, _>>()?
+                        .into_iter()
+                        .collect::<Result<Vec<_>, _>>() {
+                            Ok(args) if !args.is_empty() => args
+                                .into_iter()
+                                .map(|(x, check)| if !unifier.is_concrete(x, &[]) && !check {
+                                    panic!("type tuple should take concrete parameters in type var ranges")
+                                } else {
+                                    x
+                                }
+                            )
+                            .collect::<Vec<_>>(),
+                            Err(err) => return Ok(Err(err)),
+                            _ => return Ok(Err("tuple type needs at least 1 type parameters".to_string()))
+                        };
+                    Ok(Ok((unifier.add_ty(TypeEnum::TTuple { ty: args }), true)))
+                },
+                TypeEnum::TObj { params, obj_id, .. } => {
+                    let subst = {
+                        let params = &*params.borrow();
+                        if params.len() != args.len() {
+                            return Ok(Err(format!(
+                                "for class #{}, expect {} type parameters, got {}.",
+                                obj_id.0,
+                                params.len(),
+                                args.len(),
+                            )))
+                        }
+                        let args = match args
+                            .iter()
+                            .map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
+                            .collect::<Result<Vec<_>, _>>()?
+                            .into_iter()
+                            .collect::<Result<Vec<_>, _>>() {
+                                Ok(args) => args
+                                    .into_iter()
+                                    .map(|(x, check)| if !unifier.is_concrete(x, &[]) && !check {
+                                        panic!("type class should take concrete parameters in type var ranges")
+                                    } else {
+                                        x
+                                    }
+                                )
+                                .collect::<Vec<_>>(),
+                                Err(err) => return Ok(Err(err)),
+                            };
+                        params
+                            .iter()
+                            .zip(args.iter())
+                            .map(|((id, _), ty)| (*id, *ty))
+                            .collect::<HashMap<_, _>>()
+                    };
+                    Ok(Ok((unifier.subst(origin_ty, &subst).unwrap_or(origin_ty), true)))
+                },
+                TypeEnum::TVirtual { .. } => {
+                    if args.len() == 1 {
+                        let ty = match self.get_pyty_obj_type(py, args.get_item(0), unifier, defs, primitives)? {
+                            Ok(ty) => ty,
+                            Err(err) => return Ok(Err(err))
+                        };
+                        if !unifier.is_concrete(ty.0, &[]) && !ty.1 {
+                            panic!("virtual class should take concrete parameters in type var ranges")
+                        }
+                        Ok(Ok((unifier.add_ty(TypeEnum::TVirtual { ty: ty.0 }), true)))
+                    } else {
+                        return Ok(Err(format!("virtual class needs exactly 1 type parameters, found {}", args.len())))
+                    }
+                }
+                _ => unimplemented!()
+            }
+        } else if ty_id == self.primitive_ids.virtual_id {
+            Ok(Ok(({
+                let ty = TypeEnum::TVirtual { ty: unifier.get_fresh_var().0 };
+                unifier.add_ty(ty)
+            }, false)))
+        } else {
+            Ok(Err("unknown type".into()))
+        }
+    }
+
+    fn get_obj_type(
+        &self,
+        py: Python,
+        obj: &PyAny,
+        unifier: &mut Unifier,
+        defs: &[Arc<RwLock<TopLevelDef>>],
+        primitives: &PrimitiveStore,
+    ) -> PyResult<Option<Type>> {
+        let ty = self.helper.type_fn.call1(py, (obj,)).unwrap();
+        let (extracted_ty, inst_check) = match self.get_pyty_obj_type(
+            py,
+            {
+                if [self.primitive_ids.typevar,
+                    self.primitive_ids.generic_alias.0,
+                    self.primitive_ids.generic_alias.1
+                ].contains(&self.helper.id_fn.call1(py, (ty.clone(),))?.extract::<u64>(py)?) {
+                    obj
+                } else {
+                    ty.as_ref(py)
+                }
+            },
+            unifier,
+            defs,
+            primitives
+        )? {
+            Ok(s) => s,
+            Err(_) => return Ok(None)
+        };
+        return match (&*unifier.get_ty(extracted_ty), inst_check) {
+            // do the instantiation for these three types
+            (TypeEnum::TList { ty }, false) => {
+                let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
+                if len == 0 {
+                    assert!(matches!(
+                        &*unifier.get_ty(extracted_ty),
+                        TypeEnum::TVar { meta: nac3core::typecheck::typedef::TypeVarMeta::Generic, range, .. }
+                        if range.borrow().is_empty()
+                    ));
+                    Ok(Some(extracted_ty))
+                } else {
+                    let actual_ty = self
+                        .get_list_elem_type(py, obj, len, unifier, defs, primitives)?;
+                    if let Some(actual_ty) = actual_ty {
+                        unifier.unify(*ty, actual_ty).unwrap();
+                        Ok(Some(extracted_ty))
+                    } else {
+                        Ok(None)
+                    }
+                }
+            }
+            (TypeEnum::TTuple { .. }, false) => {
+                let elements: &PyTuple = obj.cast_as()?;
+                let types: Result<Option<Vec<_>>, _> = elements
+                    .iter()
+                    .map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))
+                    .collect();
+                let types = types?;
+                Ok(types.map(|types| unifier.add_ty(TypeEnum::TTuple { ty: types })))
+            }
+            (TypeEnum::TObj { params, fields, .. }, false) => {
+                let var_map = params
+                    .borrow()
+                    .iter()
+                    .map(|(id_var, ty)| {
+                        if let TypeEnum::TVar { id, range, .. } = &*unifier.get_ty(*ty) {
+                            assert_eq!(*id, *id_var);
+                            (*id, unifier.get_fresh_var_with_range(&range.borrow()).0)
+                        } else {
+                            unreachable!()
+                        }
+                    })
+                    .collect::<HashMap<_, _>>();
+                // loop through non-function fields of the class to get the instantiated value
+                for field in fields.borrow().iter() {
+                    let name: String = (*field.0).into();
+                    if let TypeEnum::TFunc( .. ) = &*unifier.get_ty(field.1.0) {
+                        continue;
+                    } else {
+                        let field_data = obj.getattr(&name)?;
+                        let ty = self
+                            .get_obj_type(py, field_data, unifier, defs, primitives)?
+                            .unwrap_or(primitives.none);
+                        let field_ty = unifier.subst(field.1.0, &var_map).unwrap_or(field.1.0);
+                        if unifier.unify(ty, field_ty).is_err() {
+                            // field type mismatch
+                            return Ok(None);
+                        }
+                    }
+                }
+                for (_, ty) in var_map.iter() {
+                    // must be concrete type
+                    if !unifier.is_concrete(*ty, &[]) {
+                        return Ok(None)
+                    }
+                }
+                return Ok(Some(unifier.subst(extracted_ty, &var_map).unwrap_or(extracted_ty)));
+            }
+            _ => Ok(Some(extracted_ty))
+        };
+    }
 
     fn get_obj_value<'ctx, 'a>(
@@ -620,4 +841,4 @@ impl SymbolResolver for Resolver {
             result
         })
     }
 }
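Note: `get_pyty_obj_type` maps host `typing.TypeVar` objects through their `__constraints__` tuple, so a constrained TypeVar on the host side becomes a fresh type variable with the same range. For reference, the standard `typing` behaviour this relies on:

    from typing import TypeVar

    T = TypeVar("T", int, float)   # constrained type variable
    assert T.__constraints__ == (int, float)

    U = TypeVar("U")               # unconstrained
    assert U.__constraints__ == ()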


@@ -138,159 +138,165 @@ pub fn parse_type_annotation<T>(
     let list_id = ids[6];
     let tuple_id = ids[7];
 
-    match &expr.node {
-        Name { id, .. } => {
-            if *id == int32_id {
-                Ok(primitives.int32)
-            } else if *id == int64_id {
-                Ok(primitives.int64)
-            } else if *id == float_id {
-                Ok(primitives.float)
-            } else if *id == bool_id {
-                Ok(primitives.bool)
-            } else if *id == none_id {
-                Ok(primitives.none)
-            } else {
-                let obj_id = resolver.get_identifier_def(*id);
-                if let Some(obj_id) = obj_id {
-                    let def = top_level_defs[obj_id.0].read();
-                    if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
-                        if !type_vars.is_empty() {
-                            return Err(format!(
-                                "Unexpected number of type parameters: expected {} but got 0",
-                                type_vars.len()
-                            ));
-                        }
-                        let fields = RefCell::new(
-                            chain(
-                                fields.iter().map(|(k, v, m)| (*k, (*v, *m))),
-                                methods.iter().map(|(k, v, _)| (*k, (*v, false))),
-                            )
-                            .collect(),
-                        );
-                        Ok(unifier.add_ty(TypeEnum::TObj {
-                            obj_id,
-                            fields,
-                            params: Default::default(),
-                        }))
-                    } else {
-                        Err("Cannot use function name as type".into())
-                    }
-                } else {
-                    // it could be a type variable
-                    let ty = resolver
-                        .get_symbol_type(unifier, top_level_defs, primitives, *id)
-                        .ok_or_else(|| "unknown type variable name".to_owned())?;
-                    if let TypeEnum::TVar { .. } = &*unifier.get_ty(ty) {
-                        Ok(ty)
-                    } else {
-                        Err(format!("Unknown type annotation {}", id))
-                    }
-                }
-            }
-        }
-        Subscript { value, slice, .. } => {
-            if let Name { id, .. } = &value.node {
-                if *id == virtual_id {
-                    let ty = parse_type_annotation(
-                        resolver,
-                        top_level_defs,
-                        unifier,
-                        primitives,
-                        slice,
-                    )?;
-                    Ok(unifier.add_ty(TypeEnum::TVirtual { ty }))
-                } else if *id == list_id {
-                    let ty = parse_type_annotation(
-                        resolver,
-                        top_level_defs,
-                        unifier,
-                        primitives,
-                        slice,
-                    )?;
-                    Ok(unifier.add_ty(TypeEnum::TList { ty }))
-                } else if *id == tuple_id {
-                    if let Tuple { elts, .. } = &slice.node {
-                        let ty = elts
-                            .iter()
-                            .map(|elt| {
-                                parse_type_annotation(
-                                    resolver,
-                                    top_level_defs,
-                                    unifier,
-                                    primitives,
-                                    elt,
-                                )
-                            })
-                            .collect::<Result<Vec<_>, _>>()?;
-                        Ok(unifier.add_ty(TypeEnum::TTuple { ty }))
-                    } else {
-                        Err("Expected multiple elements for tuple".into())
-                    }
-                } else {
-                    let types = if let Tuple { elts, .. } = &slice.node {
-                        elts.iter()
-                            .map(|v| {
-                                parse_type_annotation(
-                                    resolver,
-                                    top_level_defs,
-                                    unifier,
-                                    primitives,
-                                    v,
-                                )
-                            })
-                            .collect::<Result<Vec<_>, _>>()?
-                    } else {
-                        vec![parse_type_annotation(
-                            resolver,
-                            top_level_defs,
-                            unifier,
-                            primitives,
-                            slice,
-                        )?]
-                    };
-                    let obj_id = resolver
-                        .get_identifier_def(*id)
-                        .ok_or_else(|| format!("Unknown type annotation {}", id))?;
-                    let def = top_level_defs[obj_id.0].read();
-                    if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
-                        if types.len() != type_vars.len() {
-                            return Err(format!(
-                                "Unexpected number of type parameters: expected {} but got {}",
-                                type_vars.len(),
-                                types.len()
-                            ));
-                        }
-                        let mut subst = HashMap::new();
-                        for (var, ty) in izip!(type_vars.iter(), types.iter()) {
-                            let id = if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*var) {
-                                *id
-                            } else {
-                                unreachable!()
-                            };
-                            subst.insert(id, *ty);
-                        }
-                        let mut fields = fields
-                            .iter()
-                            .map(|(attr, ty, is_mutable)| {
-                                let ty = unifier.subst(*ty, &subst).unwrap_or(*ty);
-                                (*attr, (ty, *is_mutable))
-                            })
-                            .collect::<HashMap<_, _>>();
-                        fields.extend(methods.iter().map(|(attr, ty, _)| {
-                            let ty = unifier.subst(*ty, &subst).unwrap_or(*ty);
-                            (*attr, (ty, false))
-                        }));
-                        Ok(unifier.add_ty(TypeEnum::TObj {
-                            obj_id,
-                            fields: fields.into(),
-                            params: subst.into(),
-                        }))
-                    } else {
-                        Err("Cannot use function name as type".into())
-                    }
-                }
-            } else {
+    let name_handling = |id: &StrRef, unifier: &mut Unifier| {
+        if *id == int32_id {
+            Ok(primitives.int32)
+        } else if *id == int64_id {
+            Ok(primitives.int64)
+        } else if *id == float_id {
+            Ok(primitives.float)
+        } else if *id == bool_id {
+            Ok(primitives.bool)
+        } else if *id == none_id {
+            Ok(primitives.none)
+        } else {
+            let obj_id = resolver.get_identifier_def(*id);
+            if let Some(obj_id) = obj_id {
+                let def = top_level_defs[obj_id.0].read();
+                if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
+                    if !type_vars.is_empty() {
+                        return Err(format!(
+                            "Unexpected number of type parameters: expected {} but got 0",
+                            type_vars.len()
+                        ));
+                    }
+                    let fields = RefCell::new(
+                        chain(
+                            fields.iter().map(|(k, v, m)| (*k, (*v, *m))),
+                            methods.iter().map(|(k, v, _)| (*k, (*v, false))),
+                        )
+                        .collect(),
+                    );
+                    Ok(unifier.add_ty(TypeEnum::TObj {
+                        obj_id,
+                        fields,
+                        params: Default::default(),
+                    }))
+                } else {
+                    Err("Cannot use function name as type".into())
+                }
+            } else {
+                // it could be a type variable
+                let ty = resolver
+                    .get_symbol_type(unifier, top_level_defs, primitives, *id)
+                    .ok_or_else(|| "unknown type variable name".to_owned())?;
+                if let TypeEnum::TVar { .. } = &*unifier.get_ty(ty) {
+                    Ok(ty)
+                } else {
+                    Err(format!("Unknown type annotation {}", id))
+                }
+            }
+        }
+    };
+
+    let subscript_name_handle = |id: &StrRef, slice: &Expr<T>, unifier: &mut Unifier| {
+        if *id == virtual_id {
+            let ty = parse_type_annotation(
+                resolver,
+                top_level_defs,
+                unifier,
+                primitives,
+                slice,
+            )?;
+            Ok(unifier.add_ty(TypeEnum::TVirtual { ty }))
+        } else if *id == list_id {
+            let ty = parse_type_annotation(
+                resolver,
+                top_level_defs,
+                unifier,
+                primitives,
+                slice,
+            )?;
+            Ok(unifier.add_ty(TypeEnum::TList { ty }))
+        } else if *id == tuple_id {
+            if let Tuple { elts, .. } = &slice.node {
+                let ty = elts
+                    .iter()
+                    .map(|elt| {
+                        parse_type_annotation(
+                            resolver,
+                            top_level_defs,
+                            unifier,
+                            primitives,
+                            elt,
+                        )
+                    })
+                    .collect::<Result<Vec<_>, _>>()?;
+                Ok(unifier.add_ty(TypeEnum::TTuple { ty }))
+            } else {
+                Err("Expected multiple elements for tuple".into())
+            }
+        } else {
+            let types = if let Tuple { elts, .. } = &slice.node {
+                elts.iter()
+                    .map(|v| {
+                        parse_type_annotation(
+                            resolver,
+                            top_level_defs,
+                            unifier,
+                            primitives,
+                            v,
+                        )
+                    })
+                    .collect::<Result<Vec<_>, _>>()?
+            } else {
+                vec![parse_type_annotation(
+                    resolver,
+                    top_level_defs,
+                    unifier,
+                    primitives,
+                    slice,
+                )?]
+            };
+            let obj_id = resolver
+                .get_identifier_def(*id)
+                .ok_or_else(|| format!("Unknown type annotation {}", id))?;
+            let def = top_level_defs[obj_id.0].read();
+            if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
+                if types.len() != type_vars.len() {
+                    return Err(format!(
+                        "Unexpected number of type parameters: expected {} but got {}",
+                        type_vars.len(),
+                        types.len()
+                    ));
+                }
+                let mut subst = HashMap::new();
+                for (var, ty) in izip!(type_vars.iter(), types.iter()) {
+                    let id = if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*var) {
+                        *id
+                    } else {
+                        unreachable!()
+                    };
+                    subst.insert(id, *ty);
+                }
+                let mut fields = fields
+                    .iter()
+                    .map(|(attr, ty, is_mutable)| {
+                        let ty = unifier.subst(*ty, &subst).unwrap_or(*ty);
+                        (*attr, (ty, *is_mutable))
+                    })
+                    .collect::<HashMap<_, _>>();
+                fields.extend(methods.iter().map(|(attr, ty, _)| {
+                    let ty = unifier.subst(*ty, &subst).unwrap_or(*ty);
+                    (*attr, (ty, false))
+                }));
+                Ok(unifier.add_ty(TypeEnum::TObj {
+                    obj_id,
+                    fields: fields.into(),
+                    params: subst.into(),
+                }))
+            } else {
+                Err("Cannot use function name as type".into())
+            }
+        }
+    };
+
+    match &expr.node {
+        Name { id, .. } => name_handling(id, unifier),
+        Subscript { value, slice, .. } => {
+            if let Name { id, .. } = &value.node {
+                subscript_name_handle(id, slice, unifier)
             } else {
                 Err("unsupported type expression".into())
             }
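Note: the refactor splits annotation handling into `name_handling` for bare names and `subscript_name_handle` for parameterized forms. A sketch of the Python-level annotations each path covers, assuming the primitive names used by the test programs:

    def f(x: int32, flag: bool) -> int32:        # bare names: name_handling
        return x

    def g(xs: list[int32], p: tuple[int32, bool]) -> int32:   # subscripts: subscript_name_handle
        return xs[0]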


@@ -379,7 +379,14 @@ pub fn get_built_ins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo
                     let arg = args[0].1;
                     if ctx.unifier.unioned(arg_ty, boolean) {
                         Some(arg)
-                    } else if ctx.unifier.unioned(arg_ty, int32) || ctx.unifier.unioned(arg_ty, int64) {
+                    } else if ctx.unifier.unioned(arg_ty, int32) {
+                        Some(ctx.builder.build_int_compare(
+                            IntPredicate::NE,
+                            ctx.ctx.i32_type().const_zero(),
+                            arg.into_int_value(),
+                            "bool",
+                        ).into())
+                    } else if ctx.unifier.unioned(arg_ty, int64) {
                         Some(ctx.builder.build_int_compare(
                             IntPredicate::NE,
                             ctx.ctx.i64_type().const_zero(),
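Note: `bool()` over integers now lowers to a width-matched `icmp ne 0` for int32 and int64 separately (previously one branch covered both and used the i64 zero). The lowering is meant to match the usual CPython truthiness, e.g.:

    assert bool(0) == False
    assert bool(5) == True
    # the same holds for 64-bit operands via the new int64 branch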


@@ -1,10 +1,8 @@
 use std::cell::RefCell;
 
 use nac3parser::ast::fold::Fold;
-use inkwell::{FloatPredicate, IntPredicate};
 
 use crate::{
-    symbol_resolver::SymbolValue,
     typecheck::type_inferencer::{FunctionData, Inferencer},
     codegen::expr::get_subst_key,
 };

@@ -131,7 +129,7 @@ impl TopLevelComposer {
         }
     }
 
-    fn extract_def_list(&self) -> Vec<Arc<RwLock<TopLevelDef>>> {
+    pub fn extract_def_list(&self) -> Vec<Arc<RwLock<TopLevelDef>>> {
         self.definition_ast_list.iter().map(|(def, ..)| def.clone()).collect_vec()
     }


@@ -24,8 +24,8 @@ pub struct DefinitionId(pub usize);
 pub mod composer;
 pub mod helper;
-mod type_annotation;
 pub mod builtins;
+pub mod type_annotation;
 
 use composer::*;
 use type_annotation::*;
 #[cfg(test)]


@@ -1,7 +1,6 @@
 use std::cell::RefCell;
 
-use crate::typecheck::typedef::TypeVarMeta;
 use super::*;
 
 #[derive(Clone, Debug)]
@@ -49,54 +48,121 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
     primitives: &PrimitiveStore,
     expr: &ast::Expr<T>,
     // the key stores the type_var of this topleveldef::class, we only need this field here
-    mut locked: HashMap<DefinitionId, Vec<Type>>,
+    locked: HashMap<DefinitionId, Vec<Type>>,
 ) -> Result<TypeAnnotation, String> {
-    match &expr.node {
-        ast::ExprKind::Name { id, .. } => {
-            if id == &"int32".into() {
-                Ok(TypeAnnotation::Primitive(primitives.int32))
-            } else if id == &"int64".into() {
-                Ok(TypeAnnotation::Primitive(primitives.int64))
-            } else if id == &"float".into() {
-                Ok(TypeAnnotation::Primitive(primitives.float))
-            } else if id == &"bool".into() {
-                Ok(TypeAnnotation::Primitive(primitives.bool))
-            } else if id == &"None".into() {
-                Ok(TypeAnnotation::Primitive(primitives.none))
-            } else if id == &"str".into() {
-                Ok(TypeAnnotation::Primitive(primitives.str))
-            } else if let Some(obj_id) = resolver.get_identifier_def(*id) {
-                let type_vars = {
-                    let def_read = top_level_defs[obj_id.0].try_read();
-                    if let Some(def_read) = def_read {
-                        if let TopLevelDef::Class { type_vars, .. } = &*def_read {
-                            type_vars.clone()
-                        } else {
-                            return Err("function cannot be used as a type".into());
-                        }
-                    } else {
-                        locked.get(&obj_id).unwrap().clone()
-                    }
-                };
-                // check param number here
-                if !type_vars.is_empty() {
-                    return Err(format!(
-                        "expect {} type variable parameter but got 0",
-                        type_vars.len()
-                    ));
-                }
-                Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] })
-            } else if let Some(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id) {
-                if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
-                    Ok(TypeAnnotation::TypeVar(ty))
-                } else {
-                    Err("not a type variable identifier".into())
-                }
-            } else {
-                Err("name cannot be parsed as a type annotation".into())
-            }
-        }
+    let name_handle = |id: &StrRef, unifier: &mut Unifier, locked: HashMap<DefinitionId, Vec<Type>>| {
+        if id == &"int32".into() {
+            Ok(TypeAnnotation::Primitive(primitives.int32))
+        } else if id == &"int64".into() {
+            Ok(TypeAnnotation::Primitive(primitives.int64))
+        } else if id == &"float".into() {
+            Ok(TypeAnnotation::Primitive(primitives.float))
+        } else if id == &"bool".into() {
+            Ok(TypeAnnotation::Primitive(primitives.bool))
+        } else if id == &"None".into() {
+            Ok(TypeAnnotation::Primitive(primitives.none))
+        } else if id == &"str".into() {
+            Ok(TypeAnnotation::Primitive(primitives.str))
+        } else if let Some(obj_id) = resolver.get_identifier_def(*id) {
+            let type_vars = {
+                let def_read = top_level_defs[obj_id.0].try_read();
+                if let Some(def_read) = def_read {
+                    if let TopLevelDef::Class { type_vars, .. } = &*def_read {
+                        type_vars.clone()
+                    } else {
+                        return Err("function cannot be used as a type".into());
+                    }
+                } else {
+                    locked.get(&obj_id).unwrap().clone()
+                }
+            };
+            // check param number here
+            if !type_vars.is_empty() {
+                return Err(format!(
+                    "expect {} type variable parameter but got 0",
+                    type_vars.len()
+                ));
+            }
+            Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] })
+        } else if let Some(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id) {
+            if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
+                Ok(TypeAnnotation::TypeVar(ty))
+            } else {
+                Err("not a type variable identifier".into())
+            }
+        } else {
+            Err("name cannot be parsed as a type annotation".into())
+        }
+    };
+
+    let class_name_handle =
+        |id: &StrRef, slice: &ast::Expr<T>, unifier: &mut Unifier, mut locked: HashMap<DefinitionId, Vec<Type>>| {
+            if vec!["virtual".into(), "Generic".into(), "list".into(), "tuple".into()]
+                .contains(id)
+            {
+                return Err("keywords cannot be class name".into());
+            }
+            let obj_id = resolver
+                .get_identifier_def(*id)
+                .ok_or_else(|| "unknown class name".to_string())?;
+            let type_vars = {
+                let def_read = top_level_defs[obj_id.0].try_read();
+                if let Some(def_read) = def_read {
+                    if let TopLevelDef::Class { type_vars, .. } = &*def_read {
+                        type_vars.clone()
+                    } else {
+                        unreachable!("must be class here")
+                    }
+                } else {
+                    locked.get(&obj_id).unwrap().clone()
+                }
+            };
+            // we do not check whether the application of type variables are compatible here
+            let param_type_infos = {
+                let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
+                    elts.iter().collect_vec()
+                } else {
+                    vec![slice]
+                };
+                if type_vars.len() != params_ast.len() {
+                    return Err(format!(
+                        "expect {} type parameters but got {}",
+                        type_vars.len(),
+                        params_ast.len()
+                    ));
+                }
+                let result = params_ast
+                    .into_iter()
+                    .map(|x| {
+                        parse_ast_to_type_annotation_kinds(
+                            resolver,
+                            top_level_defs,
+                            unifier,
+                            primitives,
+                            x,
+                            {
+                                locked.insert(obj_id, type_vars.clone());
+                                locked.clone()
+                            },
+                        )
+                    })
+                    .collect::<Result<Vec<_>, _>>()?;
+                // make sure the result do not contain any type vars
+                let no_type_var = result
+                    .iter()
+                    .all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
+                if no_type_var {
+                    result
+                } else {
+                    return Err("application of type vars to generic class \
+                    is not currently supported"
+                        .into());
+                }
+            };
+            Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
+        };
+
+    match &expr.node {
+        ast::ExprKind::Name { id, .. } => name_handle(id, unifier, locked),
         // virtual
         ast::ExprKind::Subscript { value, slice, .. }
             if {
@@ -163,71 +229,7 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
         // custom class
         ast::ExprKind::Subscript { value, slice, .. } => {
             if let ast::ExprKind::Name { id, .. } = &value.node {
-                if vec!["virtual".into(), "Generic".into(), "list".into(), "tuple".into()]
-                    .contains(id)
-                {
-                    return Err("keywords cannot be class name".into());
-                }
-                let obj_id = resolver
-                    .get_identifier_def(*id)
-                    .ok_or_else(|| "unknown class name".to_string())?;
-                let type_vars = {
-                    let def_read = top_level_defs[obj_id.0].try_read();
-                    if let Some(def_read) = def_read {
-                        if let TopLevelDef::Class { type_vars, .. } = &*def_read {
-                            type_vars.clone()
-                        } else {
-                            unreachable!("must be class here")
-                        }
-                    } else {
-                        locked.get(&obj_id).unwrap().clone()
-                    }
-                };
-                // we do not check whether the application of type variables are compatible here
-                let param_type_infos = {
-                    let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
-                        elts.iter().collect_vec()
-                    } else {
-                        vec![slice.as_ref()]
-                    };
-                    if type_vars.len() != params_ast.len() {
-                        return Err(format!(
-                            "expect {} type parameters but got {}",
-                            type_vars.len(),
-                            params_ast.len()
-                        ));
-                    }
-                    let result = params_ast
-                        .into_iter()
-                        .map(|x| {
-                            parse_ast_to_type_annotation_kinds(
-                                resolver,
-                                top_level_defs,
-                                unifier,
-                                primitives,
-                                x,
-                                {
-                                    locked.insert(obj_id, type_vars.clone());
-                                    locked.clone()
-                                },
-                            )
-                        })
-                        .collect::<Result<Vec<_>, _>>()?;
-                    // make sure the result do not contain any type vars
-                    let no_type_var = result
-                        .iter()
-                        .all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
-                    if no_type_var {
-                        result
-                    } else {
-                        return Err("application of type vars to generic class \
-                        is not currently supported"
-                            .into());
-                    }
-                };
-                Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
+                class_name_handle(id, slice, unifier, locked)
             } else {
                 Err("unsupported expression type for class name".into())
             }
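Note: `class_name_handle` enforces the declared type-parameter count and rejects type variables applied to a generic class from the outside. A sketch of what is accepted and rejected, with a hypothetical generic class whose body is elided:

    T = TypeVar("T", int32, float)

    class A(Generic[T]):
        ...   # body elided

    def ok(x: A[int32]) -> int32:   # concrete parameter: accepted
        ...

    def bad(x: A[T]) -> int32:      # type var applied to a generic class: rejected
        ...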
@@ -370,13 +372,7 @@ pub fn get_type_from_type_annotation_kinds(
 /// But note that here we do not make a duplication of `T`, `V`, we direclty
 /// use them as they are in the TopLevelDef::Class since those in the
 /// TopLevelDef::Class.type_vars will be substitute later when seeing applications/instantiations
-/// the Type of their fields and methods will also be subst when application/instantiation \
-/// \
-/// Note this implicit self type is different with seeing `A[T, V]` explicitly outside
-/// the class def ast body, where it is a new instantiation of the generic class `A`,
-/// but equivalent to seeing `A[T, V]` inside the class def body ast, where although we
-/// create copies of `T` and `V`, we will find them out as occured type vars in the analyze_class()
-/// and unify them with the class generic `T`, `V`
+/// the Type of their fields and methods will also be subst when application/instantiation
 pub fn make_self_type_annotation(type_vars: &[Type], object_id: DefinitionId) -> TypeAnnotation {
     TypeAnnotation::CustomClass {
         id: object_id,


@@ -167,7 +167,6 @@ impl<'a> Inferencer<'a> {
             }
             ExprKind::Constant { .. } => {}
             _ => {
-                println!("{:?}", expr.node);
                 unimplemented!()
             }
         }


@@ -855,7 +855,6 @@ impl Unifier {
                 }
             }
             _ => {
-                println!("{}", ty.get_type_name());
                 unreachable!("{} not expected", ty.get_type_name())
             }
         }


@@ -3,8 +3,9 @@ use inkwell::{
     targets::*,
     OptimizationLevel,
 };
-use nac3core::typecheck::type_inferencer::PrimitiveStore;
+use nac3core::typecheck::{type_inferencer::PrimitiveStore, typedef::{Type, Unifier}};
 use nac3parser::{ast::{Expr, ExprKind, StmtKind}, parser};
+use parking_lot::RwLock;
 use std::{borrow::Borrow, env};
 use std::fs;
 use std::{collections::HashMap, path::Path, sync::Arc, time::SystemTime};

@@ -15,7 +16,11 @@ use nac3core::{
         WorkerRegistry,
     },
     symbol_resolver::SymbolResolver,
-    toplevel::{composer::TopLevelComposer, TopLevelDef, helper::parse_parameter_default_value},
+    toplevel::{
+        composer::TopLevelComposer,
+        TopLevelDef, helper::parse_parameter_default_value,
+        type_annotation::*,
+    },
     typecheck::typedef::FunSignature,
 };
@@ -68,25 +73,84 @@ fn main() {
     for stmt in parser_result.into_iter() {
         if let StmtKind::Assign { targets, value, .. } = &stmt.node {
+            fn handle_typevar_definition(
+                var: &Expr,
+                resolver: &(dyn SymbolResolver + Send + Sync),
+                def_list: &[Arc<RwLock<TopLevelDef>>],
+                unifier: &mut Unifier,
+                primitives: &PrimitiveStore,
+            ) -> Result<Type, String> {
+                if let ExprKind::Call { func, args, .. } = &var.node {
+                    if matches!(&func.node, ExprKind::Name { id, .. } if id == &"TypeVar".into()) {
+                        let constraints = args
+                            .iter()
+                            .skip(1)
+                            .map(|x| -> Result<Type, String> {
+                                let ty = parse_ast_to_type_annotation_kinds(
+                                    resolver,
+                                    def_list,
+                                    unifier,
+                                    primitives,
+                                    x,
+                                    Default::default(),
+                                )?;
+                                get_type_from_type_annotation_kinds(def_list, unifier, primitives, &ty)
+                            })
+                            .collect::<Result<Vec<_>, _>>()?;
+                        Ok(unifier.get_fresh_var_with_range(&constraints).0)
+                    } else {
+                        Err(format!("expression {:?} cannot be handled as a TypeVar in global scope", var))
+                    }
+                } else {
+                    Err(format!("expression {:?} cannot be handled as a TypeVar in global scope", var))
+                }
+            }
+
             fn handle_assignment_pattern(
                 targets: &[Expr],
                 value: &Expr,
                 resolver: &(dyn SymbolResolver + Send + Sync),
                 internal_resolver: &ResolverInternal,
+                def_list: &[Arc<RwLock<TopLevelDef>>],
+                unifier: &mut Unifier,
+                primitives: &PrimitiveStore,
             ) -> Result<(), String> {
                 if targets.len() == 1 {
                     match &targets[0].node {
                         ExprKind::Name { id, .. } => {
-                            let val = parse_parameter_default_value(value.borrow(), resolver)?;
-                            internal_resolver.add_module_global(*id, val);
-                            Ok(())
+                            if let Ok(var) = handle_typevar_definition(
+                                value.borrow(),
+                                resolver,
+                                def_list,
+                                unifier,
+                                primitives,
+                            ) {
+                                internal_resolver.add_id_type(*id, var);
+                                Ok(())
+                            } else if let Ok(val) = parse_parameter_default_value(value.borrow(), resolver) {
+                                internal_resolver.add_module_global(*id, val);
+                                Ok(())
+                            } else {
+                                Err(format!("fails to evaluate this expression `{:?}` as a constant or TypeVar at {}",
+                                    targets[0].node,
+                                    targets[0].location,
+                                ))
+                            }
                         }
                         ExprKind::List { elts, .. }
                         | ExprKind::Tuple { elts, .. } => {
-                            handle_assignment_pattern(elts, value, resolver, internal_resolver)?;
+                            handle_assignment_pattern(
+                                elts,
+                                value,
+                                resolver,
+                                internal_resolver,
+                                def_list,
+                                unifier,
+                                primitives
+                            )?;
                             Ok(())
                         }
-                        _ => unreachable!("cannot be assigned")
+                        _ => Err(format!("assignment to {:?} is not supported at {}", targets[0], targets[0].location))
                     }
                 } else {
                     match &value.node {
@@ -105,7 +169,10 @@ fn main() {
                             std::slice::from_ref(tar),
                             val,
                             resolver,
-                            internal_resolver
+                            internal_resolver,
+                            def_list,
+                            unifier,
+                            primitives
                         )?;
                     }
                     Ok(())
@@ -115,7 +182,19 @@ fn main() {
                     }
                 }
             }
-            if let Err(err) = handle_assignment_pattern(targets, value, resolver.as_ref(), internal_resolver.as_ref()) {
+            let def_list = composer.extract_def_list();
+            let unifier = &mut composer.unifier;
+            let primitives = &composer.primitives_ty;
+            if let Err(err) = handle_assignment_pattern(
+                targets,
+                value,
+                resolver.as_ref(),
+                internal_resolver.as_ref(),
+                &def_list,
+                unifier,
+                primitives,
+            ) {
                 eprintln!("{}", err);
                 return;
             }
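Note: with `handle_typevar_definition`, a top-level assignment in a standalone test program can now declare a constrained type variable (registered via `add_id_type`) while plain constants keep going through `parse_parameter_default_value`. A sketch of what the new path accepts:

    T = TypeVar("T", int32, float)   # recognised as a TypeVar definition
    LIMIT = 42                       # still stored as a module-global constant

    def identity(x: T) -> T:
        return x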