Add floor and ceil, move built-in functions to a separate file #120

Merged
sb10q merged 3 commits from built_in_floor_ceil into master 2021-12-02 10:40:50 +08:00
13 changed files with 698 additions and 344 deletions
Showing only changes of commit aa2d79fea6

View File

@@ -2,16 +2,16 @@
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1637636156,
"narHash": "sha256-E2ym4Vcpqu9JYoQDXJZR48gVD+LPPbaCoYveIk7Xu3Y=",
"lastModified": 1638279546,
"narHash": "sha256-1KCwN7twjp1dBdp0jPgVdYFztDkCR8+roo0B34J9oBY=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b026e1cf87a108dd06fe521f224fdc72fd0b013d",
"rev": "96b4157790fc96e70d6e6c115e3f34bba7be490f",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "release-21.11",
"ref": "nixos-21.11",
"repo": "nixpkgs",
"type": "github"
}

View File

@@ -1,7 +1,7 @@
{
description = "The third-generation ARTIQ compiler";
inputs.nixpkgs.url = github:NixOS/nixpkgs/release-21.11;
inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-21.11;
outputs = { self, nixpkgs }:
let

View File

@@ -9,13 +9,17 @@ import nac3artiq
__all__ = ["KernelInvariant", "extern", "kernel", "portable", "nac3",
"ms", "us", "ns",
"print_int32", "print_int64",
"Core", "TTLOut", "parallel", "sequential"]
"Core", "TTLOut", "parallel", "sequential", "virtual"]
T = TypeVar('T')
class KernelInvariant(Generic[T]):
pass
# The virtual class must exist before nac3artiq.NAC3 is created.
class virtual(Generic[T]):
pass
import device_db
core_arguments = device_db.device_db["core"]["arguments"]
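
The `virtual` placeholder above is pure typing machinery: subscripting it, e.g. `virtual[Base]`, goes through `Generic.__class_getitem__` and produces an ordinary generic alias, which the compiler later recognises through the `virtual_id` and `generic_alias` ids added in this commit. A minimal sketch (illustrative, not part of this diff; `Base` and `f` are hypothetical names):

    class Base:
        def run(self):
            pass

    def f(obj: virtual[Base]):   # reference to Base or any subclass, dispatched at run time
        obj.run()

    # At the Python level this is ordinary typing machinery:
    #   type(virtual[Base]) is typing._GenericAlias
    #   typing.get_origin(virtual[Base]) is virtual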

View File

@@ -55,6 +55,10 @@ pub struct PrimitivePythonId {
bool: u64,
list: u64,
tuple: u64,
typevar: u64,
none: u64,
generic_alias: (u64, u64),
virtual_id: u64,
}
// TopLevelComposer is unsendable as it holds the unification table, which is
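
A pair of ids is stored for `generic_alias` because Python (3.9+) has two alias classes: subscripting a `typing.Generic` subclass yields `typing._GenericAlias`, while subscripting a builtin container such as `list` yields `types.GenericAlias`. A sketch of what these ids end up matching (illustrative, not part of the diff):

    import types
    import typing

    T = typing.TypeVar("T")

    class A(typing.Generic[T]):
        pass

    type(A[int]) is typing._GenericAlias      # matched via generic_alias.0
    type(list[int]) is types.GenericAlias     # matched via generic_alias.1
    type(T) is typing.TypeVar                 # matched via typevar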
@@ -96,10 +100,13 @@ impl Nac3 {
let val = id_fn.call1((member.get_item(1)?,))?.extract()?;
name_to_pyid.insert(key.into(), val);
}
let typings = PyModule::import(py, "typing")?;
let helper = PythonHelper {
id_fn: builtins.getattr("id").unwrap().to_object(py),
len_fn: builtins.getattr("len").unwrap().to_object(py),
type_fn: builtins.getattr("type").unwrap().to_object(py),
origin_ty_fn: typings.getattr("get_origin").unwrap().to_object(py),
args_ty_fn: typings.getattr("get_args").unwrap().to_object(py),
};
Ok((
module.getattr("__name__")?.extract()?,
@@ -284,7 +291,42 @@ impl Nac3 {
let builtins_mod = PyModule::import(py, "builtins").unwrap();
let id_fn = builtins_mod.getattr("id").unwrap();
let numpy_mod = PyModule::import(py, "numpy").unwrap();
let typing_mod = PyModule::import(py, "typing").unwrap();
let types_mod = PyModule::import(py, "types").unwrap();
let primitive_ids = PrimitivePythonId {
virtual_id: id_fn
.call1((builtins_mod
.getattr("globals")
.unwrap()
.call0()
.unwrap()
.get_item("virtual")
.unwrap(),
)).unwrap()
.extract()
.unwrap(),
generic_alias: (
id_fn
.call1((typing_mod.getattr("_GenericAlias").unwrap(),))
.unwrap()
.extract()
.unwrap(),
id_fn
.call1((types_mod.getattr("GenericAlias").unwrap(),))
.unwrap()
.extract()
.unwrap(),
),
none: id_fn
.call1((builtins_mod.getattr("None").unwrap(),))
.unwrap()
.extract()
.unwrap(),
typevar: id_fn
.call1((typing_mod.getattr("TypeVar").unwrap(),))
.unwrap()
.extract()
.unwrap(),
int: id_fn
.call1((builtins_mod.getattr("int").unwrap(),))
.unwrap()
@@ -403,10 +445,13 @@ impl Nac3 {
};
let mut synthesized = parse_program(&synthesized).unwrap();
let builtins = PyModule::import(py, "builtins")?;
let typings = PyModule::import(py, "typing")?;
let helper = PythonHelper {
id_fn: builtins.getattr("id").unwrap().to_object(py),
len_fn: builtins.getattr("len").unwrap().to_object(py),
type_fn: builtins.getattr("type").unwrap().to_object(py),
origin_ty_fn: typings.getattr("get_origin").unwrap().to_object(py),
args_ty_fn: typings.getattr("get_args").unwrap().to_object(py),
};
let resolver = Arc::new(Resolver(Arc::new(InnerResolver {
id_to_type: self.builtins_ty.clone().into(),

View File

@@ -43,6 +43,8 @@ pub struct PythonHelper {
pub type_fn: PyObject,
pub len_fn: PyObject,
pub id_fn: PyObject,
pub origin_ty_fn: PyObject,
pub args_ty_fn: PyObject,
}
struct PythonValue {
@@ -133,47 +135,46 @@ impl InnerResolver {
}))
}
fn get_obj_type(
// handle Python objects that themselves represent types:
// primitives and class types are the type objects themselves, so check them with `ty_id`;
// TypeVars and generic aliases (`A[int, bool]`) are recognised by their type, so check with `ty_ty_id`.
// the returned `bool` indicates whether the type is already instantiated
fn get_pyty_obj_type(
&self,
py: Python,
obj: &PyAny,
pyty: &PyAny,
unifier: &mut Unifier,
defs: &[Arc<RwLock<TopLevelDef>>],
primitives: &PrimitiveStore,
) -> PyResult<Option<Type>> {
) -> PyResult<Result<(Type, bool), String>> {
let ty_id: u64 = self
.helper
.id_fn
.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?
.call1(py, (pyty,))?
.extract(py)?;
let ty_ty_id: u64 = self
.helper
.id_fn
.call1(py, (self.helper.type_fn.call1(py, (pyty,))?,))?
.extract(py)?;
if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
Ok(Some(primitives.int32))
Ok(Ok((primitives.int32, true)))
} else if ty_id == self.primitive_ids.int64 {
Ok(Some(primitives.int64))
Ok(Ok((primitives.int64, true)))
} else if ty_id == self.primitive_ids.bool {
Ok(Some(primitives.bool))
Ok(Ok((primitives.bool, true)))
} else if ty_id == self.primitive_ids.float {
Ok(Some(primitives.float))
Ok(Ok((primitives.float, true)))
} else if ty_id == self.primitive_ids.list {
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
if len == 0 {
// type var parameters and concreteness are not checked here
let var = unifier.get_fresh_var().0;
let list = unifier.add_ty(TypeEnum::TList { ty: var });
Ok(Some(list))
} else {
let ty = self.get_list_elem_type(py, obj, len, unifier, defs, primitives)?;
Ok(ty.map(|ty| unifier.add_ty(TypeEnum::TList { ty })))
}
Ok(Ok((list, false)))
} else if ty_id == self.primitive_ids.tuple {
let elements: &PyTuple = obj.cast_as()?;
let types: Result<Option<Vec<_>>, _> = elements
.iter()
.map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))
.collect();
let types = types?;
Ok(types.map(|types| unifier.add_ty(TypeEnum::TTuple { ty: types })))
} else if let Some(def_id) = self.pyid_to_def.read().get(&ty_id) {
// type var parameters and concreteness are not checked here
Ok(Ok((unifier.add_ty(TypeEnum::TTuple { ty: vec![] }), false)))
} else if let Some(def_id) = self.pyid_to_def.read().get(&ty_id).cloned() {
let def = defs[def_id.0].read();
if let TopLevelDef::Class {
object_id,
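
The comment at the top of `get_pyty_obj_type` refers to the two ids computed above: `ty_id` is `id(pyty)` and identifies objects that are themselves type objects (primitives, classes), while `ty_ty_id` is `id(type(pyty))` and identifies objects whose type is special (`TypeVar` instances, generic aliases). A Python-level illustration (not part of the diff):

    from typing import TypeVar

    T = TypeVar("T")

    id(int)              # compared against the primitive ids   (ty_id branch)
    id(type(T))          # equals id(TypeVar)                   (ty_ty_id branch)
    id(type(list[int]))  # equals id(types.GenericAlias)        (ty_ty_id branch)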
@@ -183,54 +184,274 @@ impl InnerResolver {
..
} = &*def
{
let var_map: HashMap<_, _> = type_vars
// type var parameters and concreteness are not checked here, and no substitution is done
Ok(Ok({
let ty = TypeEnum::TObj {
obj_id: *object_id,
params: RefCell::new({
type_vars
.iter()
.map(|var| {
(
if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*var) {
*id
.map(|x| {
if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*x) {
(*id, *x)
} else {
unreachable!()
},
unifier.get_fresh_var().0,
)
})
.collect();
let mut fields_ty = HashMap::new();
for method in methods.iter() {
fields_ty.insert(method.0, (method.1, false));
}
for field in fields.iter() {
let name: String = field.0.into();
}).collect()
}),
fields: RefCell::new({
let mut res = methods
.iter()
.map(|(iden, ty, _)| (*iden, (*ty, false)))
.collect::<HashMap<_, _>>();
res.extend(fields.clone().into_iter().map(|x| (x.0, (x.1, x.2))));
res
})
};
// also `false` here; instantiation later uses the Python object to check compatibility
(unifier.add_ty(ty), false)
}))
} else {
// only object is supported, functions are not supported
unreachable!("function type is not supported, should not be queried")
}
} else if ty_ty_id == self.primitive_ids.typevar {
let constraint_types = {
let constraints = pyty.getattr("__constraints__").unwrap();
let mut result: Vec<Type> = vec![];
for i in 0.. {
if let Ok(constr) = constraints.get_item(i) {
result.push({
match self.get_pyty_obj_type(py, constr, unifier, defs, primitives)? {
Ok((ty, _)) => {
if unifier.is_concrete(ty, &[]) {
ty
} else {
return Ok(Err(format!(
"the {}th constraint of TypeVar `{}` is not concrete",
i + 1,
pyty.getattr("__name__")?.extract::<String>()?
)))
}
},
Err(err) => return Ok(Err(err))
}
})
} else {
break;
}
}
result
};
let res = unifier.get_fresh_var_with_range(&constraint_types).0;
Ok(Ok((res, true)))
} else if ty_ty_id == self.primitive_ids.generic_alias.0 || ty_ty_id == self.primitive_ids.generic_alias.1 {
let origin = self.helper.origin_ty_fn.call1(py, (pyty,))?;
let args = self.helper.args_ty_fn.call1(py, (pyty,))?;
let args: &PyTuple = args.cast_as(py)?;
let origin_ty = match self.get_pyty_obj_type(py, origin.as_ref(py), unifier, defs, primitives)? {
Ok((ty, false)) => ty,
Ok((_, true)) => return Ok(Err("instantiated type does not take type parameters".into())),
Err(err) => return Ok(Err(err))
};
match &*unifier.get_ty(origin_ty) {
TypeEnum::TList { .. } => {
if args.len() == 1 {
let ty = match self.get_pyty_obj_type(py, args.get_item(0), unifier, defs, primitives)? {
Ok(ty) => ty,
Err(err) => return Ok(Err(err))
};
if !unifier.is_concrete(ty.0, &[]) && !ty.1 {
panic!("type list should take concrete parameters in type var ranges")
}
Ok(Ok((unifier.add_ty(TypeEnum::TList { ty: ty.0 }), true)))
} else {
return Ok(Err(format!("type list needs exactly 1 type parameters, found {}", args.len())))
}
},
TypeEnum::TTuple { .. } => {
let args = match args
.iter()
.map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.collect::<Result<Vec<_>, _>>() {
Ok(args) if !args.is_empty() => args
.into_iter()
.map(|(x, check)| if !unifier.is_concrete(x, &[]) && !check {
panic!("type tuple should take concrete parameters in type var ranges")
} else {
x
}
)
.collect::<Vec<_>>(),
Err(err) => return Ok(Err(err)),
_ => return Ok(Err("tuple type needs at least 1 type parameters".to_string()))
};
Ok(Ok((unifier.add_ty(TypeEnum::TTuple { ty: args }), true)))
},
TypeEnum::TObj { params, obj_id, .. } => {
let subst = {
let params = &*params.borrow();
if params.len() != args.len() {
return Ok(Err(format!(
"for class #{}, expect {} type parameters, got {}.",
obj_id.0,
params.len(),
args.len(),
)))
}
let args = match args
.iter()
.map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.collect::<Result<Vec<_>, _>>() {
Ok(args) => args
.into_iter()
.map(|(x, check)| if !unifier.is_concrete(x, &[]) && !check {
panic!("type class should take concrete parameters in type var ranges")
} else {
x
}
)
.collect::<Vec<_>>(),
Err(err) => return Ok(Err(err)),
};
params
.iter()
.zip(args.iter())
.map(|((id, _), ty)| (*id, *ty))
.collect::<HashMap<_, _>>()
};
Ok(Ok((unifier.subst(origin_ty, &subst).unwrap_or(origin_ty), true)))
},
TypeEnum::TVirtual { .. } => {
if args.len() == 1 {
let ty = match self.get_pyty_obj_type(py, args.get_item(0), unifier, defs, primitives)? {
Ok(ty) => ty,
Err(err) => return Ok(Err(err))
};
if !unifier.is_concrete(ty.0, &[]) && !ty.1 {
panic!("virtual class should take concrete parameters in type var ranges")
}
Ok(Ok((unifier.add_ty(TypeEnum::TVirtual { ty: ty.0 }), true)))
} else {
return Ok(Err(format!("virtual class needs exactly 1 type parameters, found {}", args.len())))
}
}
_ => unimplemented!()
}
} else if ty_id == self.primitive_ids.virtual_id {
Ok(Ok(({
let ty = TypeEnum::TVirtual { ty: unifier.get_fresh_var().0 };
unifier.add_ty(ty)
}, false)))
} else {
Ok(Err("unknown type".into()))
}
}
fn get_obj_type(
&self,
py: Python,
obj: &PyAny,
unifier: &mut Unifier,
defs: &[Arc<RwLock<TopLevelDef>>],
primitives: &PrimitiveStore,
) -> PyResult<Option<Type>> {
let ty = self.helper.type_fn.call1(py, (obj,)).unwrap();
let (extracted_ty, inst_check) = match self.get_pyty_obj_type(
py,
{
if [self.primitive_ids.typevar,
self.primitive_ids.generic_alias.0,
self.primitive_ids.generic_alias.1
].contains(&self.helper.id_fn.call1(py, (ty.clone(),))?.extract::<u64>(py)?) {
obj
} else {
ty.as_ref(py)
}
},
unifier,
defs,
primitives
)? {
Ok(s) => s,
Err(_) => return Ok(None)
};
return match (&*unifier.get_ty(extracted_ty), inst_check) {
// do the instantiation for these three types
(TypeEnum::TList { ty }, false) => {
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
if len == 0 {
assert!(matches!(
&*unifier.get_ty(extracted_ty),
TypeEnum::TVar { meta: nac3core::typecheck::typedef::TypeVarMeta::Generic, range, .. }
if range.borrow().is_empty()
));
Ok(Some(extracted_ty))
} else {
let actual_ty = self
.get_list_elem_type(py, obj, len, unifier, defs, primitives)?;
if let Some(actual_ty) = actual_ty {
unifier.unify(*ty, actual_ty).unwrap();
Ok(Some(extracted_ty))
} else {
Ok(None)
}
}
}
(TypeEnum::TTuple { .. }, false) => {
let elements: &PyTuple = obj.cast_as()?;
let types: Result<Option<Vec<_>>, _> = elements
.iter()
.map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))
.collect();
let types = types?;
Ok(types.map(|types| unifier.add_ty(TypeEnum::TTuple { ty: types })))
}
(TypeEnum::TObj { params, fields, .. }, false) => {
let var_map = params
.borrow()
.iter()
.map(|(id_var, ty)| {
if let TypeEnum::TVar { id, range, .. } = &*unifier.get_ty(*ty) {
assert_eq!(*id, *id_var);
(*id, unifier.get_fresh_var_with_range(&range.borrow()).0)
} else {
unreachable!()
}
})
.collect::<HashMap<_, _>>();
// loop through non-function fields of the class to get the instantiated value
for field in fields.borrow().iter() {
let name: String = (*field.0).into();
if let TypeEnum::TFunc( .. ) = &*unifier.get_ty(field.1.0) {
continue;
} else {
let field_data = obj.getattr(&name)?;
let ty = self
.get_obj_type(py, field_data, unifier, defs, primitives)?
.unwrap_or(primitives.none);
let field_ty = unifier.subst(field.1, &var_map).unwrap_or(field.1);
let field_ty = unifier.subst(field.1.0, &var_map).unwrap_or(field.1.0);
if unifier.unify(ty, field_ty).is_err() {
// field type mismatch
return Ok(None);
}
fields_ty.insert(field.0, (ty, field.2));
}
}
for (_, ty) in var_map.iter() {
// must be concrete type
if !unifier.is_concrete(*ty, &[]) {
return Ok(None);
return Ok(None)
}
}
Ok(Some(unifier.add_ty(TypeEnum::TObj {
obj_id: *object_id,
fields: RefCell::new(fields_ty),
params: RefCell::new(var_map),
})))
} else {
// only object is supported, functions are not supported
Ok(None)
}
} else {
Ok(None)
return Ok(Some(unifier.subst(extracted_ty, &var_map).unwrap_or(extracted_ty)));
}
_ => Ok(Some(extracted_ty))
};
}
fn get_obj_value<'ctx, 'a>(
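
The `TypeVar` branch in the hunk above reads `__constraints__` by index until `get_item` fails, and each constraint must itself resolve to a concrete type. At the Python level the attribute is just a tuple (illustrative, not part of the diff; the `int`/`float` constraints are only an example):

    from typing import TypeVar

    T = TypeVar("T", int, float)   # constrained type variable
    T.__constraints__              # (int, float)

    U = TypeVar("U")               # unconstrained
    U.__constraints__              # ()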

View File

@@ -138,8 +138,7 @@ pub fn parse_type_annotation<T>(
let list_id = ids[6];
let tuple_id = ids[7];
match &expr.node {
Name { id, .. } => {
let name_handling = |id: &StrRef, unifier: &mut Unifier| {
if *id == int32_id {
Ok(primitives.int32)
} else if *id == int64_id {
@@ -188,9 +187,9 @@
}
}
}
}
Subscript { value, slice, .. } => {
if let Name { id, .. } = &value.node {
};
let subscript_name_handle = |id: &StrRef, slice: &Expr<T>, unifier: &mut Unifier| {
if *id == virtual_id {
let ty = parse_type_annotation(
resolver,
@@ -291,6 +290,13 @@ pub fn parse_type_annotation<T>(
Err("Cannot use function name as type".into())
}
}
};
match &expr.node {
Name { id, .. } => name_handling(id, unifier),
Subscript { value, slice, .. } => {
if let Name { id, .. } = &value.node {
subscript_name_handle(id, slice, unifier)
} else {
Err("unsupported type expression".into())
}
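
The refactor above splits `parse_type_annotation` into a `name_handling` closure for plain names and a `subscript_name_handle` closure for subscripted names, dispatched from a single `match`. The annotation shapes each path sees look roughly like this (illustrative, not part of the diff; `Base` is a hypothetical class):

    x: int32                # Name       -> name_handling
    xs: list[int32]         # Subscript  -> subscript_name_handle
    t: tuple[int32, bool]   # Subscript  -> subscript_name_handle
    v: virtual[Base]        # Subscript  -> subscript_name_handle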

View File

@@ -379,7 +379,14 @@ pub fn get_built_ins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo
let arg = args[0].1;
if ctx.unifier.unioned(arg_ty, boolean) {
Some(arg)
} else if ctx.unifier.unioned(arg_ty, int32) || ctx.unifier.unioned(arg_ty, int64) {
} else if ctx.unifier.unioned(arg_ty, int32) {
Some(ctx.builder.build_int_compare(
IntPredicate::NE,
ctx.ctx.i32_type().const_zero(),
arg.into_int_value(),
"bool",
).into())
} else if ctx.unifier.unioned(arg_ty, int64) {
Some(ctx.builder.build_int_compare(
IntPredicate::NE,
ctx.ctx.i64_type().const_zero(),
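
The `bool(...)` built-in previously shared one code path for `int32` and `int64`; it is split here because the generated integer comparison needs a zero constant of the same width as its operand (`i32` vs `i64`). The observable behaviour stays "non-zero is true" (illustrative kernel-level expectation, not part of the diff):

    bool(int32(0))         # False
    bool(int32(-5))        # True
    bool(int64(0))         # False  (now compared against an i64 zero)
    bool(int64(1 << 40))   # True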

View File

@@ -1,10 +1,8 @@
use std::cell::RefCell;
use nac3parser::ast::fold::Fold;
use inkwell::{FloatPredicate, IntPredicate};
use crate::{
symbol_resolver::SymbolValue,
typecheck::type_inferencer::{FunctionData, Inferencer},
codegen::expr::get_subst_key,
};
@@ -131,7 +129,7 @@ impl TopLevelComposer {
}
}
fn extract_def_list(&self) -> Vec<Arc<RwLock<TopLevelDef>>> {
pub fn extract_def_list(&self) -> Vec<Arc<RwLock<TopLevelDef>>> {
self.definition_ast_list.iter().map(|(def, ..)| def.clone()).collect_vec()
}

View File

@@ -24,8 +24,8 @@ pub struct DefinitionId(pub usize);
pub mod composer;
pub mod helper;
mod type_annotation;
pub mod builtins;
pub mod type_annotation;
use composer::*;
use type_annotation::*;
#[cfg(test)]

View File

@@ -1,7 +1,6 @@
use std::cell::RefCell;
use crate::typecheck::typedef::TypeVarMeta;
use super::*;
#[derive(Clone, Debug)]
@@ -49,10 +48,9 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
primitives: &PrimitiveStore,
expr: &ast::Expr<T>,
// the key stores the type_var of this topleveldef::class, we only need this field here
mut locked: HashMap<DefinitionId, Vec<Type>>,
locked: HashMap<DefinitionId, Vec<Type>>,
) -> Result<TypeAnnotation, String> {
match &expr.node {
ast::ExprKind::Name { id, .. } => {
let name_handle = |id: &StrRef, unifier: &mut Unifier, locked: HashMap<DefinitionId, Vec<Type>>| {
if id == &"int32".into() {
Ok(TypeAnnotation::Primitive(primitives.int32))
} else if id == &"int64".into() {
@@ -95,8 +93,76 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
} else {
Err("name cannot be parsed as a type annotation".into())
}
}
};
let class_name_handle =
|id: &StrRef, slice: &ast::Expr<T>, unifier: &mut Unifier, mut locked: HashMap<DefinitionId, Vec<Type>>| {
if vec!["virtual".into(), "Generic".into(), "list".into(), "tuple".into()]
.contains(id)
{
return Err("keywords cannot be class name".into());
}
let obj_id = resolver
.get_identifier_def(*id)
.ok_or_else(|| "unknown class name".to_string())?;
let type_vars = {
let def_read = top_level_defs[obj_id.0].try_read();
if let Some(def_read) = def_read {
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
type_vars.clone()
} else {
unreachable!("must be class here")
}
} else {
locked.get(&obj_id).unwrap().clone()
}
};
// we do not check here whether the application of type variables is compatible
let param_type_infos = {
let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
elts.iter().collect_vec()
} else {
vec![slice]
};
if type_vars.len() != params_ast.len() {
return Err(format!(
"expect {} type parameters but got {}",
type_vars.len(),
params_ast.len()
));
}
let result = params_ast
.into_iter()
.map(|x| {
parse_ast_to_type_annotation_kinds(
resolver,
top_level_defs,
unifier,
primitives,
x,
{
locked.insert(obj_id, type_vars.clone());
locked.clone()
},
)
})
.collect::<Result<Vec<_>, _>>()?;
// make sure the result does not contain any type vars
let no_type_var = result
.iter()
.all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
if no_type_var {
result
} else {
return Err("application of type vars to generic class \
is not currently supported"
.into());
}
};
Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
};
match &expr.node {
ast::ExprKind::Name { id, .. } => name_handle(id, unifier, locked),
// virtual
ast::ExprKind::Subscript { value, slice, .. }
if {
@@ -163,71 +229,7 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
// custom class
ast::ExprKind::Subscript { value, slice, .. } => {
if let ast::ExprKind::Name { id, .. } = &value.node {
if vec!["virtual".into(), "Generic".into(), "list".into(), "tuple".into()]
.contains(id)
{
return Err("keywords cannot be class name".into());
}
let obj_id = resolver
.get_identifier_def(*id)
.ok_or_else(|| "unknown class name".to_string())?;
let type_vars = {
let def_read = top_level_defs[obj_id.0].try_read();
if let Some(def_read) = def_read {
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
type_vars.clone()
} else {
unreachable!("must be class here")
}
} else {
locked.get(&obj_id).unwrap().clone()
}
};
// we do not check whether the application of type variables are compatible here
let param_type_infos = {
let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
elts.iter().collect_vec()
} else {
vec![slice.as_ref()]
};
if type_vars.len() != params_ast.len() {
return Err(format!(
"expect {} type parameters but got {}",
type_vars.len(),
params_ast.len()
));
}
let result = params_ast
.into_iter()
.map(|x| {
parse_ast_to_type_annotation_kinds(
resolver,
top_level_defs,
unifier,
primitives,
x,
{
locked.insert(obj_id, type_vars.clone());
locked.clone()
},
)
})
.collect::<Result<Vec<_>, _>>()?;
// make sure the result do not contain any type vars
let no_type_var = result
.iter()
.all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
if no_type_var {
result
} else {
return Err("application of type vars to generic class \
is not currently supported"
.into());
}
};
Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
class_name_handle(id, slice, unifier, locked)
} else {
Err("unsupported expression type for class name".into())
}
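
The logic moved into `class_name_handle` keeps the existing restriction spelled out in its error message: a user generic class may only be applied to arguments that contain no type variables. Roughly (illustrative, not part of the diff; `B` and `T` are hypothetical):

    T = TypeVar("T", int32, float)

    class B(Generic[T]):
        pass

    x: B[int32]   # accepted: the parameter list contains no type variables
    y: B[T]       # rejected: "application of type vars to generic class is not currently supported"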
@@ -370,13 +372,7 @@ pub fn get_type_from_type_annotation_kinds(
But note that here we do not make a duplication of `T`, `V`; we directly
use them as they are in the TopLevelDef::Class since those in the
TopLevelDef::Class.type_vars will be substituted later when seeing applications/instantiations
/// the Type of their fields and methods will also be subst when application/instantiation \
/// \
/// Note this implicit self type is different with seeing `A[T, V]` explicitly outside
/// the class def ast body, where it is a new instantiation of the generic class `A`,
/// but equivalent to seeing `A[T, V]` inside the class def body ast, where although we
/// create copies of `T` and `V`, we will find them out as occured type vars in the analyze_class()
/// and unify them with the class generic `T`, `V`
the Type of their fields and methods will also be substituted upon application/instantiation
pub fn make_self_type_annotation(type_vars: &[Type], object_id: DefinitionId) -> TypeAnnotation {
TypeAnnotation::CustomClass {
id: object_id,

View File

@@ -167,7 +167,6 @@ impl<'a> Inferencer<'a> {
}
ExprKind::Constant { .. } => {}
_ => {
println!("{:?}", expr.node);
unimplemented!()
}
}

View File

@@ -855,7 +855,6 @@ impl Unifier {
}
}
_ => {
println!("{}", ty.get_type_name());
unreachable!("{} not expected", ty.get_type_name())
}
}

View File

@@ -3,8 +3,9 @@ use inkwell::{
targets::*,
OptimizationLevel,
};
use nac3core::typecheck::type_inferencer::PrimitiveStore;
use nac3core::typecheck::{type_inferencer::PrimitiveStore, typedef::{Type, Unifier}};
use nac3parser::{ast::{Expr, ExprKind, StmtKind}, parser};
use parking_lot::RwLock;
use std::{borrow::Borrow, env};
use std::fs;
use std::{collections::HashMap, path::Path, sync::Arc, time::SystemTime};
@@ -15,7 +16,11 @@ use nac3core::{
WorkerRegistry,
},
symbol_resolver::SymbolResolver,
toplevel::{composer::TopLevelComposer, TopLevelDef, helper::parse_parameter_default_value},
toplevel::{
composer::TopLevelComposer,
TopLevelDef, helper::parse_parameter_default_value,
type_annotation::*,
},
typecheck::typedef::FunSignature,
};
@@ -68,25 +73,84 @@ fn main() {
for stmt in parser_result.into_iter() {
if let StmtKind::Assign { targets, value, .. } = &stmt.node {
fn handle_typevar_definition(
var: &Expr,
resolver: &(dyn SymbolResolver + Send + Sync),
def_list: &[Arc<RwLock<TopLevelDef>>],
unifier: &mut Unifier,
primitives: &PrimitiveStore,
) -> Result<Type, String> {
if let ExprKind::Call { func, args, .. } = &var.node {
if matches!(&func.node, ExprKind::Name { id, .. } if id == &"TypeVar".into()) {
let constraints = args
.iter()
.skip(1)
.map(|x| -> Result<Type, String> {
let ty = parse_ast_to_type_annotation_kinds(
resolver,
def_list,
unifier,
primitives,
x,
Default::default(),
)?;
get_type_from_type_annotation_kinds(def_list, unifier, primitives, &ty)
})
.collect::<Result<Vec<_>, _>>()?;
Ok(unifier.get_fresh_var_with_range(&constraints).0)
} else {
Err(format!("expression {:?} cannot be handled as a TypeVar in global scope", var))
}
} else {
Err(format!("expression {:?} cannot be handled as a TypeVar in global scope", var))
}
}
fn handle_assignment_pattern(
targets: &[Expr],
value: &Expr,
resolver: &(dyn SymbolResolver + Send + Sync),
internal_resolver: &ResolverInternal,
def_list: &[Arc<RwLock<TopLevelDef>>],
unifier: &mut Unifier,
primitives: &PrimitiveStore,
) -> Result<(), String> {
if targets.len() == 1 {
match &targets[0].node {
ExprKind::Name { id, .. } => {
let val = parse_parameter_default_value(value.borrow(), resolver)?;
if let Ok(var) = handle_typevar_definition(
value.borrow(),
resolver,
def_list,
unifier,
primitives,
) {
internal_resolver.add_id_type(*id, var);
Ok(())
} else if let Ok(val) = parse_parameter_default_value(value.borrow(), resolver) {
internal_resolver.add_module_global(*id, val);
Ok(())
} else {
Err(format!("fails to evaluate this expression `{:?}` as a constant or TypeVar at {}",
targets[0].node,
targets[0].location,
))
}
}
ExprKind::List { elts, .. }
| ExprKind::Tuple { elts, .. } => {
handle_assignment_pattern(elts, value, resolver, internal_resolver)?;
handle_assignment_pattern(
elts,
value,
resolver,
internal_resolver,
def_list,
unifier,
primitives
)?;
Ok(())
}
_ => unreachable!("cannot be assigned")
_ => Err(format!("assignment to {:?} is not supported at {}", targets[0], targets[0].location))
}
} else {
match &value.node {
@@ -105,7 +169,10 @@ fn main() {
std::slice::from_ref(tar),
val,
resolver,
internal_resolver
internal_resolver,
def_list,
unifier,
primitives
)?;
}
Ok(())
@@ -115,7 +182,19 @@ fn main() {
}
}
}
if let Err(err) = handle_assignment_pattern(targets, value, resolver.as_ref(), internal_resolver.as_ref()) {
let def_list = composer.extract_def_list();
let unifier = &mut composer.unifier;
let primitives = &composer.primitives_ty;
if let Err(err) = handle_assignment_pattern(
targets,
value,
resolver.as_ref(),
internal_resolver.as_ref(),
&def_list,
unifier,
primitives,
) {
eprintln!("{}", err);
return;
}
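
With `handle_typevar_definition` and the extended `handle_assignment_pattern`, a standalone program can now declare type variables through module-level assignments, while plain constants keep working and anything else reports an error instead of hitting the old `unreachable!`. A sketch of source the new path is meant to accept (illustrative, not part of the diff, assuming the usual NAC3 primitive names):

    T = TypeVar("T", int32, float)   # registered via handle_typevar_definition
    N = 42                           # still registered as a module-level constant

    def first(l: list[T]) -> T:
        return l[0]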