forked from M-Labs/nac3

commit d9cb506f6a
parent 352831b2ca

nac3core: refactored for better error messages
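
Note: the recurring change in this commit is that the code-generation hooks stop panicking (unwrap()/unimplemented!()) on user-level problems and instead return Result<_, String>, propagating failures upward with `?` until they can be reported as a compilation error. A minimal, self-contained sketch of that pattern follows; Codegen, emit_expr and emit_call are illustrative stand-ins, not the actual nac3 API.

    // Sketch only: error strings bubble up instead of aborting codegen with a panic.
    struct Codegen;

    impl Codegen {
        // Before the refactor this kind of hook returned Option<_> and unwrapped on failure;
        // now the failure is a String that the caller can surface to the user.
        fn emit_expr(&mut self, supported: bool) -> Result<Option<i64>, String> {
            if supported {
                Ok(Some(42))
            } else {
                Err("unsupported expression".to_string())
            }
        }

        fn emit_call(&mut self, supported: bool) -> Result<Option<i64>, String> {
            // `?` forwards the error string to the caller.
            let value = self.emit_expr(supported)?;
            Ok(value)
        }
    }

    fn main() {
        let mut gen = Codegen;
        if let Err(e) = gen.emit_call(false) {
            eprintln!("nac3 compilation failure:\n----------\n{}", e);
        }
    }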
@@ -1,4 +1,13 @@
 from min_artiq import *
+from numpy import int32, int64
+
+@extern
+def output_int(x: int32):
+    ...
+
+
+class InexistingException(Exception):
+    pass
 
 @nac3
 class Demo:
@@ -11,6 +20,16 @@ class Demo:
         self.led0 = TTLOut(self.core, 18)
         self.led1 = TTLOut(self.core, 19)
 
+    @kernel
+    def test(self):
+        a = (1, True)
+        a[0]()
+
+    @kernel
+    def test2(self):
+        a = (1, True)
+        output_int(int32(a))
+
     @kernel
     def run(self):
         self.core.reset()
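
The two kernels added above deliberately misuse values (test calls an element of a tuple, test2 passes a tuple to int32) so that the compiler has something to report. As a rough, standalone illustration of the reporting style this commit moves toward, the sketch below returns an error string carrying a source location instead of panicking; Location and lower_call are hypothetical names used only for this example.

    // Hypothetical example: surface a user error with its location as a Result, not a panic.
    #[derive(Debug, Clone, Copy)]
    struct Location {
        row: usize,
        col: usize,
    }

    impl std::fmt::Display for Location {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(f, "line {}, column {}", self.row, self.col)
        }
    }

    fn lower_call(callee_is_callable: bool, loc: Location) -> Result<(), String> {
        if callee_is_callable {
            Ok(())
        } else {
            // The user sees a readable message instead of a compiler panic.
            Err(format!("cannot call a non-callable value (at {})", loc))
        }
    }

    fn main() {
        if let Err(e) = lower_call(false, Location { row: 14, col: 9 }) {
            eprintln!("nac3 compilation failure:\n----------\n{}", e);
        }
    }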
@@ -64,10 +64,10 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
         obj: Option<(Type, ValueEnum<'ctx>)>,
         fun: (&FunSignature, DefinitionId),
         params: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
-    ) -> Option<BasicValueEnum<'ctx>> {
+    ) -> Result<Option<BasicValueEnum<'ctx>>, String> {
-        let result = gen_call(self, ctx, obj, fun, params);
+        let result = gen_call(self, ctx, obj, fun, params)?;
         if let Some(end) = self.end.clone() {
-            let old_end = self.gen_expr(ctx, &end).unwrap().to_basic_value_enum(ctx, self);
+            let old_end = self.gen_expr(ctx, &end)?.unwrap().to_basic_value_enum(ctx, self);
             let now = self.timeline.emit_now_mu(ctx);
             let smax = ctx.module.get_function("llvm.smax.i64").unwrap_or_else(|| {
                 let i64 = ctx.ctx.i64_type();
@@ -83,21 +83,21 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
                 .try_as_basic_value()
                 .left()
                 .unwrap();
-            let end_store = self.gen_store_target(ctx, &end);
+            let end_store = self.gen_store_target(ctx, &end)?;
             ctx.builder.build_store(end_store, max);
         }
         if let Some(start) = self.start.clone() {
-            let start_val = self.gen_expr(ctx, &start).unwrap().to_basic_value_enum(ctx, self);
+            let start_val = self.gen_expr(ctx, &start)?.unwrap().to_basic_value_enum(ctx, self);
             self.timeline.emit_at_mu(ctx, start_val);
         }
-        result
+        Ok(result)
     }
 
     fn gen_with<'ctx, 'a>(
         &mut self,
         ctx: &mut CodeGenContext<'ctx, 'a>,
         stmt: &Stmt<Option<Type>>,
-    ) {
+    ) -> Result<(), String> {
         if let StmtKind::With { items, body, .. } = &stmt.node {
             if items.len() == 1 && items[0].optional_vars.is_none() {
                 let item = &items[0];
@@ -119,7 +119,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
                         let old_start = self.start.take();
                         let old_end = self.end.take();
                         let now = if let Some(old_start) = &old_start {
-                            self.gen_expr(ctx, old_start).unwrap().to_basic_value_enum(ctx, self)
+                            self.gen_expr(ctx, old_start)?.unwrap().to_basic_value_enum(ctx, self)
                         } else {
                             self.timeline.emit_now_mu(ctx)
                         };
@@ -130,7 +130,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
                         // the LLVM Context.
                         // The name is guaranteed to be unique as users cannot use this as variable
                        // name.
-                        self.start = old_start.clone().or_else(|| {
+                        self.start = old_start.clone().map_or_else(|| {
                            let start = format!("with-{}-start", self.name_counter).into();
                            let start_expr = Located {
                                // location does not matter at this point
@@ -138,10 +138,10 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
                                node: ExprKind::Name { id: start, ctx: name_ctx.clone() },
                                custom: Some(ctx.primitives.int64),
                            };
-                            let start = self.gen_store_target(ctx, &start_expr);
+                            let start = self.gen_store_target(ctx, &start_expr)?;
                            ctx.builder.build_store(start, now);
-                            Some(start_expr)
+                            Ok(Some(start_expr)) as Result<_, String>
-                        });
+                        }, |v| Ok(Some(v)))?;
                        let end = format!("with-{}-end", self.name_counter).into();
                        let end_expr = Located {
                            // location does not matter at this point
@@ -149,11 +149,11 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
                            node: ExprKind::Name { id: end, ctx: name_ctx.clone() },
                            custom: Some(ctx.primitives.int64),
                        };
-                        let end = self.gen_store_target(ctx, &end_expr);
+                        let end = self.gen_store_target(ctx, &end_expr)?;
                        ctx.builder.build_store(end, now);
                        self.end = Some(end_expr);
                        self.name_counter += 1;
-                        gen_block(self, ctx, body.iter());
+                        gen_block(self, ctx, body.iter())?;
                        let current = ctx.builder.get_insert_block().unwrap();
                        // if the current block is terminated, move before the terminator
                        // we want to set the timeline before reaching the terminator
@@ -171,7 +171,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
                        // set duration
                        let end_expr = self.end.take().unwrap();
                        let end_val =
-                            self.gen_expr(ctx, &end_expr).unwrap().to_basic_value_enum(ctx, self);
+                            self.gen_expr(ctx, &end_expr)?.unwrap().to_basic_value_enum(ctx, self);
 
                        // inside a sequential block
                        if old_start.is_none() {
@@ -180,7 +180,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
                        // inside a parallel block, should update the outer max now_mu
                        if let Some(old_end) = &old_end {
                            let outer_end_val =
-                                self.gen_expr(ctx, old_end).unwrap().to_basic_value_enum(ctx, self);
+                                self.gen_expr(ctx, old_end)?.unwrap().to_basic_value_enum(ctx, self);
                            let smax =
                                ctx.module.get_function("llvm.smax.i64").unwrap_or_else(|| {
                                    let i64 = ctx.ctx.i64_type();
@@ -196,7 +196,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
                                .try_as_basic_value()
                                .left()
                                .unwrap();
-                            let outer_end = self.gen_store_target(ctx, old_end);
+                            let outer_end = self.gen_store_target(ctx, old_end)?;
                            ctx.builder.build_store(outer_end, max);
                        }
                        self.start = old_start;
@@ -204,29 +204,29 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
                        if reset_position {
                            ctx.builder.position_at_end(current);
                        }
-                        return;
+                        return Ok(());
                    } else if id == &"sequential".into() {
                        let start = self.start.take();
                        for stmt in body.iter() {
-                            self.gen_stmt(ctx, stmt);
+                            self.gen_stmt(ctx, stmt)?;
                            if ctx.is_terminated() {
                                break;
                            }
                        }
                        self.start = start;
-                        return
+                        return Ok(());
                    }
                }
            }
            // not parallel/sequential
-            gen_with(self, ctx, stmt);
+            gen_with(self, ctx, stmt)
        } else {
            unreachable!()
        }
    }
 }
 
-fn gen_rpc_tag<'ctx, 'a>(ctx: &mut CodeGenContext<'ctx, 'a>, ty: Type, buffer: &mut Vec<u8>) {
+fn gen_rpc_tag<'ctx, 'a>(ctx: &mut CodeGenContext<'ctx, 'a>, ty: Type, buffer: &mut Vec<u8>) -> Result<(), String> {
     use nac3core::typecheck::typedef::TypeEnum::*;
 
     let int32 = ctx.primitives.int32;
@@ -249,24 +249,25 @@ fn gen_rpc_tag<'ctx, 'a>(ctx: &mut CodeGenContext<'ctx, 'a>, ty: Type, buffer: &
     } else if ctx.unifier.unioned(ty, none) {
         buffer.push(b'n');
     } else {
-        let ty = ctx.unifier.get_ty(ty);
+        let ty_enum = ctx.unifier.get_ty(ty);
-        match &*ty {
+        match &*ty_enum {
             TTuple { ty } => {
                 buffer.push(b't');
                 buffer.push(ty.len() as u8);
                 for ty in ty {
-                    gen_rpc_tag(ctx, *ty, buffer);
+                    gen_rpc_tag(ctx, *ty, buffer)?;
                 }
             }
             TList { ty } => {
                 buffer.push(b'l');
-                gen_rpc_tag(ctx, *ty, buffer);
+                gen_rpc_tag(ctx, *ty, buffer)?;
             }
             // we should return an error, this will be fixed after improving error message
             // as this requires returning an error during codegen
-            _ => unimplemented!(),
+            _ => return Err(format!("Unsupported type: {:?}", ctx.unifier.stringify(ty))),
         }
     }
+    Ok(())
 }
 
 fn rpc_codegen_callback_fn<'ctx, 'a>(
@@ -275,7 +276,7 @@ fn rpc_codegen_callback_fn<'ctx, 'a>(
     fun: (&FunSignature, DefinitionId),
     args: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
     generator: &mut dyn CodeGenerator,
-) -> Option<BasicValueEnum<'ctx>> {
+) -> Result<Option<BasicValueEnum<'ctx>>, String> {
     let ptr_type = ctx.ctx.i8_type().ptr_type(inkwell::AddressSpace::Generic);
     let size_type = generator.get_size_type(ctx.ctx);
     let int8 = ctx.ctx.i8_type();
@@ -289,10 +290,10 @@ fn rpc_codegen_callback_fn<'ctx, 'a>(
         tag.push(b'O');
     }
     for arg in fun.0.args.iter() {
-        gen_rpc_tag(ctx, arg.ty, &mut tag);
+        gen_rpc_tag(ctx, arg.ty, &mut tag)?;
     }
     tag.push(b':');
-    gen_rpc_tag(ctx, fun.0.ret, &mut tag);
+    gen_rpc_tag(ctx, fun.0.ret, &mut tag)?;
 
     let mut hasher = DefaultHasher::new();
     tag.hash(&mut hasher);
@@ -432,7 +433,7 @@ fn rpc_codegen_callback_fn<'ctx, 'a>(
 
     if ctx.unifier.unioned(fun.0.ret, ctx.primitives.none) {
         ctx.build_call_or_invoke(rpc_recv, &[ptr_type.const_null().into()], "rpc_recv");
-        return None
+        return Ok(None)
     }
 
     let prehead_bb = ctx.builder.get_insert_block().unwrap();
@@ -474,7 +475,7 @@ fn rpc_codegen_callback_fn<'ctx, 'a>(
 
     ctx.builder.position_at_end(tail_bb);
 
-    if need_load {
+    Ok(if need_load {
         let result = ctx.builder.build_load(slot, "rpc.result");
         ctx.builder.build_call(
             stackrestore,
@@ -484,7 +485,7 @@ fn rpc_codegen_callback_fn<'ctx, 'a>(
         Some(result)
     } else {
         Some(slot.into())
-    }
+    })
 }
 
 pub fn rpc_codegen_callback() -> Arc<GenCall> {
@@ -203,7 +203,7 @@ impl Nac3 {
         let fun_ty = if method_name.is_empty() {
             base_ty
         } else if let TypeEnum::TObj { fields, .. } = &*unifier.get_ty(base_ty) {
-            match fields.borrow().get(&(*method_name).into()) {
+            match fields.get(&(*method_name).into()) {
                 Some(t) => t.0,
                 None => return Some(
                     format!("object launching kernel does not have method `{}`", method_name)
@@ -213,8 +213,7 @@ impl Nac3 {
             return Some("cannot launch kernel by calling a non-callable".into())
         };
 
-        if let TypeEnum::TFunc(sig) = &*unifier.get_ty(fun_ty) {
+        if let TypeEnum::TFunc(FunSignature { args, .. }) = &*unifier.get_ty(fun_ty) {
-            let FunSignature { args, .. } = &*sig.borrow();
             if arg_names.len() > args.len() {
                 return Some(format!(
                     "launching kernel function with too many arguments (expect {}, found {})",
@@ -243,7 +242,7 @@ impl Nac3 {
             };
             if let Err(e) = unifier.unify(in_ty, *ty) {
                 return Some(format!(
-                    "type error ({}) at parameter #{} when calling kernel function", e, i
+                    "type error ({}) at parameter #{} when calling kernel function", e.to_display(unifier).to_string(), i
                 ));
             }
         }
@@ -281,7 +280,7 @@ impl Nac3 {
                     vars: HashMap::new(),
                 },
                 Arc::new(GenCall::new(Box::new(move |ctx, _, _, _, _| {
-                    Some(time_fns.emit_now_mu(ctx))
+                    Ok(Some(time_fns.emit_now_mu(ctx)))
                 }))),
             ),
             (
@@ -298,7 +297,7 @@ impl Nac3 {
                 Arc::new(GenCall::new(Box::new(move |ctx, _, _, args, generator| {
                     let arg = args[0].1.clone().to_basic_value_enum(ctx, generator);
                     time_fns.emit_at_mu(ctx, arg);
-                    None
+                    Ok(None)
                 }))),
             ),
             (
@@ -315,7 +314,7 @@ impl Nac3 {
                 Arc::new(GenCall::new(Box::new(move |ctx, _, _, args, generator| {
                     let arg = args[0].1.clone().to_basic_value_enum(ctx, generator);
                     time_fns.emit_delay_mu(ctx, arg);
-                    None
+                    Ok(None)
                 }))),
             ),
         ];
@@ -536,7 +535,7 @@ impl Nac3 {
             let (name, def_id, ty) = composer
                 .register_top_level(stmt.clone(), Some(resolver.clone()), path.clone())
                 .map_err(|e| {
-                    exceptions::PyRuntimeError::new_err(format!("nac3 compilation failure: {}", e))
+                    exceptions::PyRuntimeError::new_err(format!("nac3 compilation failure\n----------\n{}", e))
                 })?;
 
             match &stmt.node {
@@ -637,7 +636,7 @@ impl Nac3 {
                 // report error of __modinit__ separately
                 if !e.contains("__nac3_synthesized_modinit__") {
                     return Err(exceptions::PyRuntimeError::new_err(
-                        format!("nac3 compilation failure: {}", e)
+                        format!("nac3 compilation failure: \n----------\n{}", e)
                     ));
                 } else {
                     let msg = Self::report_modinit(
@@ -15,7 +15,6 @@ use pyo3::{
     PyAny, PyObject, PyResult, Python,
 };
 use std::{
-    cell::RefCell,
     collections::{HashMap, HashSet},
     sync::Arc,
 };
@@ -208,7 +207,7 @@ impl InnerResolver {
                     ty = match unifier.unify(ty, b) {
                         Ok(_) => ty,
                         Err(e) => return Ok(Err(format!(
-                            "inhomogeneous type ({}) at element #{} of the list", e, i
+                            "inhomogeneous type ({}) at element #{} of the list", e.to_display(unifier).to_string(), i
                         )))
                     };
                 }
@@ -246,7 +245,7 @@ impl InnerResolver {
             Ok(Ok((primitives.exception, true)))
         }else if ty_id == self.primitive_ids.list {
             // do not handle type var param and concrete check here
-            let var = unifier.get_fresh_var().0;
+            let var = unifier.get_dummy_var().0;
             let list = unifier.add_ty(TypeEnum::TList { ty: var });
             Ok(Ok((list, false)))
         } else if ty_id == self.primitive_ids.tuple {
@@ -266,8 +265,7 @@ impl InnerResolver {
                 Ok(Ok({
                     let ty = TypeEnum::TObj {
                         obj_id: *object_id,
-                        params: RefCell::new({
+                        params: type_vars
-                            type_vars
                             .iter()
                             .map(|x| {
                                 if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*x) {
@@ -276,16 +274,15 @@ impl InnerResolver {
                                     unreachable!()
                                 }
                             })
-                            .collect()
+                            .collect(),
-                        }),
+                        fields: {
-                        fields: RefCell::new({
                             let mut res = methods
                                 .iter()
                                 .map(|(iden, ty, _)| (*iden, (*ty, false)))
                                 .collect::<HashMap<_, _>>();
                             res.extend(fields.clone().into_iter().map(|x| (x.0, (x.1, x.2))));
                             res
-                        }),
+                        },
                     };
                     // here also false, later instantiation use python object to check compatible
                     (unifier.add_ty(ty), false)
@@ -295,6 +292,7 @@ impl InnerResolver {
                 unreachable!("function type is not supported, should not be queried")
             }
         } else if ty_ty_id == self.primitive_ids.typevar {
+            let name: &str = pyty.getattr("__name__").unwrap().extract().unwrap();
             let constraint_types = {
                 let constraints = pyty.getattr("__constraints__").unwrap();
                 let mut result: Vec<Type> = vec![];
@@ -322,7 +320,7 @@ impl InnerResolver {
                 }
                 result
             };
-            let res = unifier.get_fresh_var_with_range(&constraint_types).0;
+            let res = unifier.get_fresh_var_with_range(&constraint_types, Some(name.into()), None).0;
             Ok(Ok((res, true)))
         } else if ty_ty_id == self.primitive_ids.generic_alias.0
             || ty_ty_id == self.primitive_ids.generic_alias.1
@@ -388,7 +386,6 @@ impl InnerResolver {
                 }
                 TypeEnum::TObj { params, obj_id, .. } => {
                     let subst = {
-                        let params = &*params.borrow();
                         if params.len() != args.len() {
                             return Ok(Err(format!(
                                 "for class #{}, expect {} type parameters, got {}.",
@@ -456,14 +453,16 @@ impl InnerResolver {
             Ok(Ok((
                 {
                     let ty = TypeEnum::TVirtual {
-                        ty: unifier.get_fresh_var().0,
+                        ty: unifier.get_dummy_var().0,
                     };
                     unifier.add_ty(ty)
                 },
                 false,
             )))
         } else {
-            Ok(Err("unknown type".into()))
+            let str_fn = pyo3::types::PyModule::import(py, "builtins").unwrap().getattr("repr").unwrap();
+            let str_repr: String = str_fn.call1((pyty,)).unwrap().extract().unwrap();
+            Ok(Err(format!("{} is not supported in nac3 (did you forgot to put @nac3 annotation?)", str_repr)))
         }
     }
 
@@ -510,8 +509,8 @@ impl InnerResolver {
                 if len == 0 {
                     assert!(matches!(
                         &*unifier.get_ty(extracted_ty),
-                        TypeEnum::TVar { meta: nac3core::typecheck::typedef::TypeVarMeta::Generic, range, .. }
+                        TypeEnum::TVar { fields: None, range, .. }
-                            if range.borrow().is_empty()
+                            if range.is_empty()
                     ));
                     Ok(Ok(extracted_ty))
                 } else {
@@ -520,7 +519,7 @@ impl InnerResolver {
                     match actual_ty {
                         Ok(t) => match unifier.unify(*ty, t) {
                             Ok(_) => Ok(Ok(unifier.add_ty(TypeEnum::TList{ ty: *ty }))),
-                            Err(e) => Ok(Err(format!("type error ({}) for the list", e))),
+                            Err(e) => Ok(Err(format!("type error ({}) for the list", e.to_display(unifier).to_string()))),
                         }
                         Err(e) => Ok(Err(e)),
                     }
@@ -537,19 +536,18 @@ impl InnerResolver {
             }
             (TypeEnum::TObj { params, fields, .. }, false) => {
                 let var_map = params
-                    .borrow()
                     .iter()
                     .map(|(id_var, ty)| {
-                        if let TypeEnum::TVar { id, range, .. } = &*unifier.get_ty(*ty) {
+                        if let TypeEnum::TVar { id, range, name, loc, .. } = &*unifier.get_ty(*ty) {
                             assert_eq!(*id, *id_var);
-                            (*id, unifier.get_fresh_var_with_range(&range.borrow()).0)
+                            (*id, unifier.get_fresh_var_with_range(range, *name, *loc).0)
                         } else {
                             unreachable!()
                        }
                    })
                    .collect::<HashMap<_, _>>();
                 // loop through non-function fields of the class to get the instantiated value
-                for field in fields.borrow().iter() {
+                for field in fields.iter() {
                     let name: String = (*field.0).into();
                     if let TypeEnum::TFunc(..) = &*unifier.get_ty(field.1 .0) {
                         continue;
@@ -566,7 +564,7 @@ impl InnerResolver {
                     if let Err(e) = unifier.unify(ty, field_ty) {
                         // field type mismatch
                         return Ok(Err(format!(
-                            "error when getting type of field `{}` ({})", name, e
+                            "error when getting type of field `{}` ({})", name, e.to_display(unifier).to_string()
                         )));
                     }
                 }
@@ -988,18 +986,19 @@ impl SymbolResolver for Resolver {
         })
     }
 
-    fn get_identifier_def(&self, id: StrRef) -> Option<DefinitionId> {
+    fn get_identifier_def(&self, id: StrRef) -> Result<DefinitionId, String> {
         {
             let id_to_def = self.0.id_to_def.read();
-            id_to_def.get(&id).cloned()
+            id_to_def.get(&id).cloned().ok_or_else(|| "".to_string())
         }
-        .or_else(|| {
+        .or_else(|_| {
-            let py_id = self.0.name_to_pyid.get(&id);
+            let py_id = self.0.name_to_pyid.get(&id).ok_or(format!("Undefined identifier `{}`", id))?;
-            let result = py_id.and_then(|id| self.0.pyid_to_def.read().get(id).copied());
+            let result = self.0.pyid_to_def.read().get(py_id).copied().ok_or(format!(
-            if let Some(result) = &result {
+                "`{}` is not registered in nac3, did you forgot to add @nac3?",
-                self.0.id_to_def.write().insert(id, *result);
+                id
-            }
+            ))?;
-            result
+            self.0.id_to_def.write().insert(id, result);
+            Ok(result)
         })
     }
 
@@ -157,7 +157,6 @@ impl ConcreteTypeStore {
             TypeEnum::TObj { obj_id, fields, params } => ConcreteTypeEnum::TObj {
                 obj_id: *obj_id,
                 fields: fields
-                    .borrow()
                     .iter()
                     .filter_map(|(name, ty)| {
                         // here we should not have type vars, but some partial instantiated
@@ -171,7 +170,6 @@ impl ConcreteTypeStore {
                     })
                     .collect(),
                 params: params
-                    .borrow()
                     .iter()
                     .map(|(id, ty)| {
                         (*id, self.from_unifier_type(unifier, primitives, *ty, cache))
@@ -182,7 +180,6 @@ impl ConcreteTypeStore {
                 ty: self.from_unifier_type(unifier, primitives, *ty, cache),
             },
             TypeEnum::TFunc(signature) => {
-                let signature = signature.borrow();
                 self.from_signature(unifier, primitives, &*signature, cache)
             }
             _ => unreachable!(),
@@ -210,7 +207,7 @@ impl ConcreteTypeStore {
             return if let Some(ty) = ty {
                 *ty
             } else {
-                *ty = Some(unifier.get_fresh_var().0);
+                *ty = Some(unifier.get_dummy_var().0);
                 ty.unwrap()
             };
         }
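
One pattern worth calling out in the expression-lowering changes below is folding with a Result accumulator (used when chained comparisons are lowered): each step may fail with a String, and the failure short-circuits through the fold. A standalone sketch under those assumptions, with plain booleans standing in for generated values:

    // Sketch of a fold whose accumulator is a Result, so any step's error wins.
    fn combine_all(flags: &[Result<bool, String>]) -> Result<Option<bool>, String> {
        flags.iter().fold(Ok(None), |prev: Result<Option<bool>, String>, current| {
            let current = current.clone()?;
            // AND the new result into the accumulated value, if there is one already.
            Ok(prev?.map(|v| v && current).or(Some(current)))
        })
    }

    fn main() {
        let ok: Vec<Result<bool, String>> = vec![Ok(true), Ok(true), Ok(false)];
        let bad: Vec<Result<bool, String>> = vec![Ok(true), Err("type error".to_string())];
        println!("{:?}", combine_all(&ok));  // Ok(Some(false))
        println!("{:?}", combine_all(&bad)); // Err("type error")
    }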
@ -31,7 +31,7 @@ pub fn get_subst_key(
|
|||||||
let mut vars = obj
|
let mut vars = obj
|
||||||
.map(|ty| {
|
.map(|ty| {
|
||||||
if let TypeEnum::TObj { params, .. } = &*unifier.get_ty(ty) {
|
if let TypeEnum::TObj { params, .. } = &*unifier.get_ty(ty) {
|
||||||
params.borrow().clone()
|
params.clone()
|
||||||
} else {
|
} else {
|
||||||
unreachable!()
|
unreachable!()
|
||||||
}
|
}
|
||||||
@ -40,7 +40,7 @@ pub fn get_subst_key(
|
|||||||
vars.extend(fun_vars.iter());
|
vars.extend(fun_vars.iter());
|
||||||
let sorted = vars.keys().filter(|id| filter.map(|v| v.contains(id)).unwrap_or(true)).sorted();
|
let sorted = vars.keys().filter(|id| filter.map(|v| v.contains(id)).unwrap_or(true)).sorted();
|
||||||
sorted
|
sorted
|
||||||
.map(|id| unifier.stringify(vars[id], &mut |id| id.to_string(), &mut |id| id.to_string()))
|
.map(|id| unifier.internal_stringify(vars[id], &mut |id| id.to_string(), &mut |id| id.to_string(), &mut None))
|
||||||
.join(", ")
|
.join(", ")
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -352,7 +352,7 @@ pub fn gen_constructor<'ctx, 'a, G: CodeGenerator>(
|
|||||||
signature: &FunSignature,
|
signature: &FunSignature,
|
||||||
def: &TopLevelDef,
|
def: &TopLevelDef,
|
||||||
params: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
|
params: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
|
||||||
) -> BasicValueEnum<'ctx> {
|
) -> Result<BasicValueEnum<'ctx>, String> {
|
||||||
match def {
|
match def {
|
||||||
TopLevelDef::Class { methods, .. } => {
|
TopLevelDef::Class { methods, .. } => {
|
||||||
// TODO: what about other fields that require alloca?
|
// TODO: what about other fields that require alloca?
|
||||||
@ -374,9 +374,9 @@ pub fn gen_constructor<'ctx, 'a, G: CodeGenerator>(
|
|||||||
Some((signature.ret, zelf.into())),
|
Some((signature.ret, zelf.into())),
|
||||||
(&sign, fun_id),
|
(&sign, fun_id),
|
||||||
params,
|
params,
|
||||||
);
|
)?;
|
||||||
}
|
}
|
||||||
zelf
|
Ok(zelf)
|
||||||
}
|
}
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
}
|
}
|
||||||
@ -387,7 +387,7 @@ pub fn gen_func_instance<'ctx, 'a>(
|
|||||||
obj: Option<(Type, ValueEnum<'ctx>)>,
|
obj: Option<(Type, ValueEnum<'ctx>)>,
|
||||||
fun: (&FunSignature, &mut TopLevelDef, String),
|
fun: (&FunSignature, &mut TopLevelDef, String),
|
||||||
id: usize,
|
id: usize,
|
||||||
) -> String {
|
) -> Result<String, String> {
|
||||||
if let (
|
if let (
|
||||||
sign,
|
sign,
|
||||||
TopLevelDef::Function {
|
TopLevelDef::Function {
|
||||||
@ -396,7 +396,9 @@ pub fn gen_func_instance<'ctx, 'a>(
|
|||||||
key,
|
key,
|
||||||
) = fun
|
) = fun
|
||||||
{
|
{
|
||||||
instance_to_symbol.get(&key).cloned().unwrap_or_else(|| {
|
if let Some(sym) = instance_to_symbol.get(&key) {
|
||||||
|
return Ok(sym.clone());
|
||||||
|
}
|
||||||
let symbol = format!("{}.{}", name, instance_to_symbol.len());
|
let symbol = format!("{}.{}", name, instance_to_symbol.len());
|
||||||
instance_to_symbol.insert(key.clone(), symbol.clone());
|
instance_to_symbol.insert(key.clone(), symbol.clone());
|
||||||
let key = ctx.get_subst_key(obj.as_ref().map(|a| a.0), sign, Some(var_id));
|
let key = ctx.get_subst_key(obj.as_ref().map(|a| a.0), sign, Some(var_id));
|
||||||
@ -444,8 +446,7 @@ pub fn gen_func_instance<'ctx, 'a>(
|
|||||||
unifier_index: instance.unifier_id,
|
unifier_index: instance.unifier_id,
|
||||||
id,
|
id,
|
||||||
});
|
});
|
||||||
symbol
|
Ok(symbol)
|
||||||
})
|
|
||||||
} else {
|
} else {
|
||||||
unreachable!()
|
unreachable!()
|
||||||
}
|
}
|
||||||
@ -457,9 +458,8 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator>(
|
|||||||
obj: Option<(Type, ValueEnum<'ctx>)>,
|
obj: Option<(Type, ValueEnum<'ctx>)>,
|
||||||
fun: (&FunSignature, DefinitionId),
|
fun: (&FunSignature, DefinitionId),
|
||||||
params: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
|
params: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
|
||||||
) -> Option<BasicValueEnum<'ctx>> {
|
) -> Result<Option<BasicValueEnum<'ctx>>, String> {
|
||||||
let definition = ctx.top_level.definitions.read().get(fun.1 .0).cloned().unwrap();
|
let definition = ctx.top_level.definitions.read().get(fun.1 .0).cloned().unwrap();
|
||||||
|
|
||||||
let id;
|
let id;
|
||||||
let key;
|
let key;
|
||||||
let param_vals;
|
let param_vals;
|
||||||
@ -492,7 +492,6 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator>(
|
|||||||
if let Some(obj) = &obj {
|
if let Some(obj) = &obj {
|
||||||
real_params.insert(0, obj.1.clone());
|
real_params.insert(0, obj.1.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
let static_params = real_params
|
let static_params = real_params
|
||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
@ -530,16 +529,16 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator>(
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|p| p.to_basic_value_enum(ctx, generator))
|
.map(|p| p.to_basic_value_enum(ctx, generator))
|
||||||
.collect_vec();
|
.collect_vec();
|
||||||
instance_to_symbol.get(&key).cloned()
|
instance_to_symbol.get(&key).cloned().ok_or_else(|| "".into())
|
||||||
}
|
}
|
||||||
TopLevelDef::Class { .. } => {
|
TopLevelDef::Class { .. } => {
|
||||||
return Some(generator.gen_constructor(ctx, fun.0, &*def, params))
|
return Ok(Some(generator.gen_constructor(ctx, fun.0, &*def, params)?))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
.unwrap_or_else(|| {
|
.or_else(|_: String| {
|
||||||
generator.gen_func_instance(ctx, obj.clone(), (fun.0, &mut *definition.write(), key), id)
|
generator.gen_func_instance(ctx, obj.clone(), (fun.0, &mut *definition.write(), key), id)
|
||||||
});
|
})?;
|
||||||
let fun_val = ctx.module.get_function(&symbol).unwrap_or_else(|| {
|
let fun_val = ctx.module.get_function(&symbol).unwrap_or_else(|| {
|
||||||
let mut args = fun.0.args.clone();
|
let mut args = fun.0.args.clone();
|
||||||
if let Some(obj) = &obj {
|
if let Some(obj) = &obj {
|
||||||
@ -554,8 +553,7 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator>(
|
|||||||
};
|
};
|
||||||
ctx.module.add_function(&symbol, fun_ty, None)
|
ctx.module.add_function(&symbol, fun_ty, None)
|
||||||
});
|
});
|
||||||
|
Ok(ctx.build_call_or_invoke(fun_val, ¶m_vals, "call"))
|
||||||
ctx.build_call_or_invoke(fun_val, ¶m_vals, "call")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn destructure_range<'ctx, 'a>(
|
pub fn destructure_range<'ctx, 'a>(
|
||||||
@ -607,7 +605,7 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||||||
generator: &mut G,
|
generator: &mut G,
|
||||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||||
expr: &Expr<Option<Type>>,
|
expr: &Expr<Option<Type>>,
|
||||||
) -> BasicValueEnum<'ctx> {
|
) -> Result<BasicValueEnum<'ctx>, String> {
|
||||||
if let ExprKind::ListComp { elt, generators } = &expr.node {
|
if let ExprKind::ListComp { elt, generators } = &expr.node {
|
||||||
let current = ctx.builder.get_insert_block().unwrap().get_parent().unwrap();
|
let current = ctx.builder.get_insert_block().unwrap().get_parent().unwrap();
|
||||||
let test_bb = ctx.ctx.append_basic_block(current, "test");
|
let test_bb = ctx.ctx.append_basic_block(current, "test");
|
||||||
@ -615,13 +613,13 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||||||
let cont_bb = ctx.ctx.append_basic_block(current, "cont");
|
let cont_bb = ctx.ctx.append_basic_block(current, "cont");
|
||||||
|
|
||||||
let Comprehension { target, iter, ifs, .. } = &generators[0];
|
let Comprehension { target, iter, ifs, .. } = &generators[0];
|
||||||
let iter_val = generator.gen_expr(ctx, iter).unwrap().to_basic_value_enum(ctx, generator);
|
let iter_val = generator.gen_expr(ctx, iter)?.unwrap().to_basic_value_enum(ctx, generator);
|
||||||
let int32 = ctx.ctx.i32_type();
|
let int32 = ctx.ctx.i32_type();
|
||||||
let size_t = generator.get_size_type(ctx.ctx);
|
let size_t = generator.get_size_type(ctx.ctx);
|
||||||
let zero_size_t = size_t.const_zero();
|
let zero_size_t = size_t.const_zero();
|
||||||
let zero_32 = int32.const_zero();
|
let zero_32 = int32.const_zero();
|
||||||
|
|
||||||
let index = generator.gen_var_alloc(ctx, size_t.into());
|
let index = generator.gen_var_alloc(ctx, size_t.into())?;
|
||||||
ctx.builder.build_store(index, zero_size_t);
|
ctx.builder.build_store(index, zero_size_t);
|
||||||
|
|
||||||
let elem_ty = ctx.get_llvm_type(generator, elt.custom.unwrap());
|
let elem_ty = ctx.get_llvm_type(generator, elt.custom.unwrap());
|
||||||
@ -664,7 +662,7 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||||||
list_content =
|
list_content =
|
||||||
ctx.build_gep_and_load(list, &[zero_size_t, zero_32]).into_pointer_value();
|
ctx.build_gep_and_load(list, &[zero_size_t, zero_32]).into_pointer_value();
|
||||||
|
|
||||||
let i = generator.gen_store_target(ctx, target);
|
let i = generator.gen_store_target(ctx, target)?;
|
||||||
ctx.builder.build_store(i, ctx.builder.build_int_sub(start, step, "start_init"));
|
ctx.builder.build_store(i, ctx.builder.build_int_sub(start, step, "start_init"));
|
||||||
ctx.builder.build_unconditional_branch(test_bb);
|
ctx.builder.build_unconditional_branch(test_bb);
|
||||||
ctx.builder.position_at_end(test_bb);
|
ctx.builder.position_at_end(test_bb);
|
||||||
@ -699,7 +697,7 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||||||
list = allocate_list(generator, ctx, elem_ty, length);
|
list = allocate_list(generator, ctx, elem_ty, length);
|
||||||
list_content =
|
list_content =
|
||||||
ctx.build_gep_and_load(list, &[zero_size_t, zero_32]).into_pointer_value();
|
ctx.build_gep_and_load(list, &[zero_size_t, zero_32]).into_pointer_value();
|
||||||
let counter = generator.gen_var_alloc(ctx, size_t.into());
|
let counter = generator.gen_var_alloc(ctx, size_t.into())?;
|
||||||
// counter = -1
|
// counter = -1
|
||||||
ctx.builder.build_store(counter, size_t.const_int(u64::max_value(), true));
|
ctx.builder.build_store(counter, size_t.const_int(u64::max_value(), true));
|
||||||
ctx.builder.build_unconditional_branch(test_bb);
|
ctx.builder.build_unconditional_branch(test_bb);
|
||||||
@ -714,11 +712,11 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||||||
.build_gep_and_load(iter_val.into_pointer_value(), &[zero_size_t, zero_32])
|
.build_gep_and_load(iter_val.into_pointer_value(), &[zero_size_t, zero_32])
|
||||||
.into_pointer_value();
|
.into_pointer_value();
|
||||||
let val = ctx.build_gep_and_load(arr_ptr, &[tmp]);
|
let val = ctx.build_gep_and_load(arr_ptr, &[tmp]);
|
||||||
generator.gen_assign(ctx, target, val.into());
|
generator.gen_assign(ctx, target, val.into())?;
|
||||||
}
|
}
|
||||||
for cond in ifs.iter() {
|
for cond in ifs.iter() {
|
||||||
let result = generator
|
let result = generator
|
||||||
.gen_expr(ctx, cond)
|
.gen_expr(ctx, cond)?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_basic_value_enum(ctx, generator)
|
.to_basic_value_enum(ctx, generator)
|
||||||
.into_int_value();
|
.into_int_value();
|
||||||
@ -726,7 +724,7 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||||||
ctx.builder.build_conditional_branch(result, succ, test_bb);
|
ctx.builder.build_conditional_branch(result, succ, test_bb);
|
||||||
ctx.builder.position_at_end(succ);
|
ctx.builder.position_at_end(succ);
|
||||||
}
|
}
|
||||||
let elem = generator.gen_expr(ctx, elt).unwrap();
|
let elem = generator.gen_expr(ctx, elt)?.unwrap();
|
||||||
let i = ctx.builder.build_load(index, "i").into_int_value();
|
let i = ctx.builder.build_load(index, "i").into_int_value();
|
||||||
let elem_ptr = unsafe { ctx.builder.build_gep(list_content, &[i], "elem_ptr") };
|
let elem_ptr = unsafe { ctx.builder.build_gep(list_content, &[i], "elem_ptr") };
|
||||||
let val = elem.to_basic_value_enum(ctx, generator);
|
let val = elem.to_basic_value_enum(ctx, generator);
|
||||||
@ -739,7 +737,7 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||||||
ctx.builder.build_gep(list, &[zero_size_t, int32.const_int(1, false)], "length")
|
ctx.builder.build_gep(list, &[zero_size_t, int32.const_int(1, false)], "length")
|
||||||
};
|
};
|
||||||
ctx.builder.build_store(len_ptr, ctx.builder.build_load(index, "index"));
|
ctx.builder.build_store(len_ptr, ctx.builder.build_load(index, "index"));
|
||||||
list.into()
|
Ok(list.into())
|
||||||
} else {
|
} else {
|
||||||
unreachable!()
|
unreachable!()
|
||||||
}
|
}
|
||||||
@ -751,16 +749,16 @@ pub fn gen_binop_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
left: &Expr<Option<Type>>,
|
left: &Expr<Option<Type>>,
|
||||||
op: &Operator,
|
op: &Operator,
|
||||||
right: &Expr<Option<Type>>,
|
right: &Expr<Option<Type>>,
|
||||||
) -> ValueEnum<'ctx> {
|
) -> Result<ValueEnum<'ctx>, String> {
|
||||||
let ty1 = ctx.unifier.get_representative(left.custom.unwrap());
|
let ty1 = ctx.unifier.get_representative(left.custom.unwrap());
|
||||||
let ty2 = ctx.unifier.get_representative(right.custom.unwrap());
|
let ty2 = ctx.unifier.get_representative(right.custom.unwrap());
|
||||||
let left = generator.gen_expr(ctx, left).unwrap().to_basic_value_enum(ctx, generator);
|
let left = generator.gen_expr(ctx, left)?.unwrap().to_basic_value_enum(ctx, generator);
|
||||||
let right = generator.gen_expr(ctx, right).unwrap().to_basic_value_enum(ctx, generator);
|
let right = generator.gen_expr(ctx, right)?.unwrap().to_basic_value_enum(ctx, generator);
|
||||||
|
|
||||||
// we can directly compare the types, because we've got their representatives
|
// we can directly compare the types, because we've got their representatives
|
||||||
// which would be unchanged until further unification, which we would never do
|
// which would be unchanged until further unification, which we would never do
|
||||||
// when doing code generation for function instances
|
// when doing code generation for function instances
|
||||||
if ty1 == ty2 && [ctx.primitives.int32, ctx.primitives.int64].contains(&ty1) {
|
Ok(if ty1 == ty2 && [ctx.primitives.int32, ctx.primitives.int64].contains(&ty1) {
|
||||||
ctx.gen_int_ops(op, left, right)
|
ctx.gen_int_ops(op, left, right)
|
||||||
} else if ty1 == ty2 && ctx.primitives.float == ty1 {
|
} else if ty1 == ty2 && ctx.primitives.float == ty1 {
|
||||||
ctx.gen_float_ops(op, left, right)
|
ctx.gen_float_ops(op, left, right)
|
||||||
@ -783,17 +781,17 @@ pub fn gen_binop_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
} else {
|
} else {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
.into()
|
.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
||||||
generator: &mut G,
|
generator: &mut G,
|
||||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||||
expr: &Expr<Option<Type>>,
|
expr: &Expr<Option<Type>>,
|
||||||
) -> Option<ValueEnum<'ctx>> {
|
) -> Result<Option<ValueEnum<'ctx>>, String> {
|
||||||
let int32 = ctx.ctx.i32_type();
|
let int32 = ctx.ctx.i32_type();
|
||||||
let zero = int32.const_int(0, false);
|
let zero = int32.const_int(0, false);
|
||||||
Some(match &expr.node {
|
Ok(Some(match &expr.node {
|
||||||
ExprKind::Constant { value, .. } => {
|
ExprKind::Constant { value, .. } => {
|
||||||
let ty = expr.custom.unwrap();
|
let ty = expr.custom.unwrap();
|
||||||
ctx.gen_const(generator, value, ty).into()
|
ctx.gen_const(generator, value, ty).into()
|
||||||
@ -823,8 +821,8 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
// we should use memcpy for that instead of generating thousands of stores
|
// we should use memcpy for that instead of generating thousands of stores
|
||||||
let elements = elts
|
let elements = elts
|
||||||
.iter()
|
.iter()
|
||||||
.map(|x| generator.gen_expr(ctx, x).unwrap().to_basic_value_enum(ctx, generator))
|
.map(|x| generator.gen_expr(ctx, x).map(|v| v.unwrap().to_basic_value_enum(ctx, generator)))
|
||||||
.collect_vec();
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
let ty = if elements.is_empty() {
|
let ty = if elements.is_empty() {
|
||||||
if let TypeEnum::TList { ty } = &*ctx.unifier.get_ty(expr.custom.unwrap()) {
|
if let TypeEnum::TList { ty } = &*ctx.unifier.get_ty(expr.custom.unwrap()) {
|
||||||
ctx.get_llvm_type(generator, *ty)
|
ctx.get_llvm_type(generator, *ty)
|
||||||
@ -852,8 +850,8 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
ExprKind::Tuple { elts, .. } => {
|
ExprKind::Tuple { elts, .. } => {
|
||||||
let element_val = elts
|
let element_val = elts
|
||||||
.iter()
|
.iter()
|
||||||
.map(|x| generator.gen_expr(ctx, x).unwrap().to_basic_value_enum(ctx, generator))
|
.map(|x| generator.gen_expr(ctx, x).map(|v| v.unwrap().to_basic_value_enum(ctx, generator)))
|
||||||
.collect_vec();
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
let element_ty = element_val.iter().map(BasicValueEnum::get_type).collect_vec();
|
let element_ty = element_val.iter().map(BasicValueEnum::get_type).collect_vec();
|
||||||
let tuple_ty = ctx.ctx.struct_type(&element_ty, false);
|
let tuple_ty = ctx.ctx.struct_type(&element_ty, false);
|
||||||
let tuple_ptr = ctx.builder.build_alloca(tuple_ty, "tuple");
|
let tuple_ptr = ctx.builder.build_alloca(tuple_ty, "tuple");
|
||||||
@ -871,7 +869,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
}
|
}
|
||||||
ExprKind::Attribute { value, attr, .. } => {
|
ExprKind::Attribute { value, attr, .. } => {
|
||||||
// note that we would handle class methods directly in calls
|
// note that we would handle class methods directly in calls
|
||||||
match generator.gen_expr(ctx, value).unwrap() {
|
match generator.gen_expr(ctx, value)?.unwrap() {
|
||||||
ValueEnum::Static(v) => v.get_field(*attr, ctx).unwrap_or_else(|| {
|
ValueEnum::Static(v) => v.get_field(*attr, ctx).unwrap_or_else(|| {
|
||||||
let v = v.to_basic_value_enum(ctx, generator);
|
let v = v.to_basic_value_enum(ctx, generator);
|
||||||
let index = ctx.get_attr_index(value.custom.unwrap(), *attr);
|
let index = ctx.get_attr_index(value.custom.unwrap(), *attr);
|
||||||
@ -892,7 +890,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
ExprKind::BoolOp { op, values } => {
|
ExprKind::BoolOp { op, values } => {
|
||||||
// requires conditional branches for short-circuiting...
|
// requires conditional branches for short-circuiting...
|
||||||
let left = generator
|
let left = generator
|
||||||
.gen_expr(ctx, &values[0])
|
.gen_expr(ctx, &values[0])?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_basic_value_enum(ctx, generator)
|
.to_basic_value_enum(ctx, generator)
|
||||||
.into_int_value();
|
.into_int_value();
|
||||||
@ -908,7 +906,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
ctx.builder.build_unconditional_branch(cont_bb);
|
ctx.builder.build_unconditional_branch(cont_bb);
|
||||||
ctx.builder.position_at_end(b_bb);
|
ctx.builder.position_at_end(b_bb);
|
||||||
let b = generator
|
let b = generator
|
||||||
.gen_expr(ctx, &values[1])
|
.gen_expr(ctx, &values[1])?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_basic_value_enum(ctx, generator)
|
.to_basic_value_enum(ctx, generator)
|
||||||
.into_int_value();
|
.into_int_value();
|
||||||
@ -918,7 +916,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
Boolop::And => {
|
Boolop::And => {
|
||||||
ctx.builder.position_at_end(a_bb);
|
ctx.builder.position_at_end(a_bb);
|
||||||
let a = generator
|
let a = generator
|
||||||
.gen_expr(ctx, &values[1])
|
.gen_expr(ctx, &values[1])?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_basic_value_enum(ctx, generator)
|
.to_basic_value_enum(ctx, generator)
|
||||||
.into_int_value();
|
.into_int_value();
|
||||||
@ -934,10 +932,10 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
phi.add_incoming(&[(&a, a_bb), (&b, b_bb)]);
|
phi.add_incoming(&[(&a, a_bb), (&b, b_bb)]);
|
||||||
phi.as_basic_value().into()
|
phi.as_basic_value().into()
|
||||||
}
|
}
|
||||||
ExprKind::BinOp { op, left, right } => gen_binop_expr(generator, ctx, left, op, right),
|
ExprKind::BinOp { op, left, right } => gen_binop_expr(generator, ctx, left, op, right)?,
|
||||||
ExprKind::UnaryOp { op, operand } => {
|
ExprKind::UnaryOp { op, operand } => {
|
||||||
let ty = ctx.unifier.get_representative(operand.custom.unwrap());
|
let ty = ctx.unifier.get_representative(operand.custom.unwrap());
|
||||||
let val = generator.gen_expr(ctx, operand).unwrap().to_basic_value_enum(ctx, generator);
|
let val = generator.gen_expr(ctx, operand)?.unwrap().to_basic_value_enum(ctx, generator);
|
||||||
if ty == ctx.primitives.bool {
|
if ty == ctx.primitives.bool {
|
||||||
let val = val.into_int_value();
|
let val = val.into_int_value();
|
||||||
match op {
|
match op {
|
||||||
@ -984,7 +982,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
}
|
}
|
||||||
ExprKind::Compare { left, ops, comparators } => {
|
ExprKind::Compare { left, ops, comparators } => {
|
||||||
izip!(chain(once(left.as_ref()), comparators.iter()), comparators.iter(), ops.iter(),)
|
izip!(chain(once(left.as_ref()), comparators.iter()), comparators.iter(), ops.iter(),)
|
||||||
.fold(None, |prev, (lhs, rhs, op)| {
|
.fold(Ok(None), |prev: Result<Option<_>, String>, (lhs, rhs, op)| {
|
||||||
let ty = ctx.unifier.get_representative(lhs.custom.unwrap());
|
let ty = ctx.unifier.get_representative(lhs.custom.unwrap());
|
||||||
let current =
|
let current =
|
||||||
if [ctx.primitives.int32, ctx.primitives.int64, ctx.primitives.bool]
|
if [ctx.primitives.int32, ctx.primitives.int64, ctx.primitives.bool]
|
||||||
@ -995,11 +993,11 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
BasicValueEnum::IntValue(rhs),
|
BasicValueEnum::IntValue(rhs),
|
||||||
) = (
|
) = (
|
||||||
generator
|
generator
|
||||||
.gen_expr(ctx, lhs)
|
.gen_expr(ctx, lhs)?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_basic_value_enum(ctx, generator),
|
.to_basic_value_enum(ctx, generator),
|
||||||
generator
|
generator
|
||||||
.gen_expr(ctx, rhs)
|
.gen_expr(ctx, rhs)?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_basic_value_enum(ctx, generator),
|
.to_basic_value_enum(ctx, generator),
|
||||||
) {
|
) {
|
||||||
@ -1023,11 +1021,11 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
BasicValueEnum::FloatValue(rhs),
|
BasicValueEnum::FloatValue(rhs),
|
||||||
) = (
|
) = (
|
||||||
generator
|
generator
|
||||||
.gen_expr(ctx, lhs)
|
.gen_expr(ctx, lhs)?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_basic_value_enum(ctx, generator),
|
.to_basic_value_enum(ctx, generator),
|
||||||
generator
|
generator
|
||||||
.gen_expr(ctx, rhs)
|
.gen_expr(ctx, rhs)?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_basic_value_enum(ctx, generator),
|
.to_basic_value_enum(ctx, generator),
|
||||||
) {
|
) {
|
||||||
@ -1048,14 +1046,14 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||||||
} else {
|
} else {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
};
|
};
|
||||||
prev.map(|v| ctx.builder.build_and(v, current, "cmp")).or(Some(current))
|
Ok(prev?.map(|v| ctx.builder.build_and(v, current, "cmp")).or(Some(current)))
|
||||||
})
|
})?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.into() // as there should be at least 1 element, it should never be none
|
.into() // as there should be at least 1 element, it should never be none
|
||||||
}
|
}
|
||||||
ExprKind::IfExp { test, body, orelse } => {
|
ExprKind::IfExp { test, body, orelse } => {
|
||||||
let test = generator
|
let test = generator
|
||||||
.gen_expr(ctx, test)
|
.gen_expr(ctx, test)?
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_basic_value_enum(ctx, generator)
|
.to_basic_value_enum(ctx, generator)
 .into_int_value();
@@ -1065,10 +1063,10 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 let cont_bb = ctx.ctx.append_basic_block(current, "cont");
 ctx.builder.build_conditional_branch(test, then_bb, else_bb);
 ctx.builder.position_at_end(then_bb);
-let a = generator.gen_expr(ctx, body).unwrap().to_basic_value_enum(ctx, generator);
+let a = generator.gen_expr(ctx, body)?.unwrap().to_basic_value_enum(ctx, generator);
 ctx.builder.build_unconditional_branch(cont_bb);
 ctx.builder.position_at_end(else_bb);
-let b = generator.gen_expr(ctx, orelse).unwrap().to_basic_value_enum(ctx, generator);
+let b = generator.gen_expr(ctx, orelse)?.unwrap().to_basic_value_enum(ctx, generator);
 ctx.builder.build_unconditional_branch(cont_bb);
 ctx.builder.position_at_end(cont_bb);
 let phi = ctx.builder.build_phi(a.get_type(), "ifexpr");
@@ -1077,13 +1075,15 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 }
 ExprKind::Call { func, args, keywords } => {
 let mut params =
-args.iter().map(|arg| (None, generator.gen_expr(ctx, arg).unwrap())).collect_vec();
+args.iter().map(|arg| Ok((None, generator.gen_expr(ctx, arg)?.unwrap())) as Result<_, String>)
+.collect::<Result<Vec<_>, _>>()?;
 let kw_iter = keywords.iter().map(|kw| {
-(
+Ok((
 Some(*kw.node.arg.as_ref().unwrap()),
-generator.gen_expr(ctx, &kw.node.value).unwrap(),
+generator.gen_expr(ctx, &kw.node.value)?.unwrap(),
-)
+)) as Result<_, String>
 });
+let kw_iter = kw_iter.collect::<Result<Vec<_>, _>>()?;
 params.extend(kw_iter);
 let call = ctx.calls.get(&expr.location.into());
 let signature = match call {
@@ -1091,22 +1091,23 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 None => {
 let ty = func.custom.unwrap();
 if let TypeEnum::TFunc(sign) = &*ctx.unifier.get_ty(ty) {
-sign.borrow().clone()
+sign.clone()
 } else {
 unreachable!()
 }
 }
 };
-match &func.as_ref().node {
+let func = func.as_ref();
+match &func.node {
 ExprKind::Name { id, .. } => {
 // TODO: handle primitive casts and function pointers
-let fun = ctx.resolver.get_identifier_def(*id).expect("Unknown identifier");
+let fun = ctx.resolver.get_identifier_def(*id).map_err(|e| format!("{} (at {})", e, func.location))?;
-return generator
+return Ok(generator
-.gen_call(ctx, None, (&signature, fun), params)
+.gen_call(ctx, None, (&signature, fun), params)?
-.map(|v| v.into());
+.map(|v| v.into()));
 }
 ExprKind::Attribute { value, attr, .. } => {
-let val = generator.gen_expr(ctx, value).unwrap();
+let val = generator.gen_expr(ctx, value)?.unwrap();
 let id = if let TypeEnum::TObj { obj_id, .. } =
 &*ctx.unifier.get_ty(value.custom.unwrap())
 {
@@ -1129,14 +1130,14 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 unreachable!()
 }
 };
-return generator
+return Ok(generator
 .gen_call(
 ctx,
 Some((value.custom.unwrap(), val)),
 (&signature, fun_id),
 params,
-)
+)?
-.map(|v| v.into());
+.map(|v| v.into()));
 }
 _ => unimplemented!(),
 }
@@ -1144,7 +1145,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 ExprKind::Subscript { value, slice, .. } => {
 if let TypeEnum::TList { ty } = &*ctx.unifier.get_ty(value.custom.unwrap()) {
 let v = generator
-.gen_expr(ctx, value)
+.gen_expr(ctx, value)?
 .unwrap()
 .to_basic_value_enum(ctx, generator)
 .into_pointer_value();
@@ -1153,7 +1154,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 if let ExprKind::Slice { lower, upper, step } = &slice.node {
 let one = int32.const_int(1, false);
 let (start, end, step) =
-handle_slice_indices(lower, upper, step, ctx, generator, v);
+handle_slice_indices(lower, upper, step, ctx, generator, v)?;
 let length = calculate_len_for_slice_range(
 ctx,
 start,
@@ -1174,7 +1175,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 );
 let res_array_ret = allocate_list(generator, ctx, ty, length);
 let res_ind =
-handle_slice_indices(&None, &None, &None, ctx, generator, res_array_ret);
+handle_slice_indices(&None, &None, &None, ctx, generator, res_array_ret)?;
 list_slice_assignment(
 ctx,
 generator.get_size_type(ctx.ctx),
@@ -1189,7 +1190,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 let len = ctx.build_gep_and_load(v, &[zero, int32.const_int(1, false)])
 .into_int_value();
 let raw_index = generator
-.gen_expr(ctx, slice)
+.gen_expr(ctx, slice)?
 .unwrap()
 .to_basic_value_enum(ctx, generator)
 .into_int_value();
@@ -1208,7 +1209,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 }
 } else if let TypeEnum::TTuple { .. } = &*ctx.unifier.get_ty(value.custom.unwrap()) {
 let v = generator
-.gen_expr(ctx, value)
+.gen_expr(ctx, value)?
 .unwrap()
 .to_basic_value_enum(ctx, generator)
 .into_struct_value();
@@ -1224,7 +1225,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 }
 }
 .into(),
-ExprKind::ListComp { .. } => gen_comprehension(generator, ctx, expr).into(),
+ExprKind::ListComp { .. } => gen_comprehension(generator, ctx, expr)?.into(),
 _ => unimplemented!(),
-})
+}))
 }
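The recurring change in `gen_expr` is mechanical: every sub-expression is now evaluated through the fallible `gen_expr`, the error string is propagated with `?`, and only then is the optional value unwrapped and lowered. A sketch of that call-site shape, using a helper name invented for illustration:

    // Sketch only; assumes CodeGenerator, CodeGenContext, Expr, Type and
    // BasicValueEnum are in scope as in the surrounding modules.
    fn eval_to_basic_value<'ctx, 'a, G: CodeGenerator>(
        generator: &mut G,
        ctx: &mut CodeGenContext<'ctx, 'a>,
        expr: &Expr<Option<Type>>,
    ) -> Result<BasicValueEnum<'ctx>, String> {
        let value = generator
            .gen_expr(ctx, expr)? // propagate the codegen error string
            .ok_or_else(|| format!("expression at {} produces no value", expr.location))?;
        Ok(value.to_basic_value_enum(ctx, generator))
    }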
@@ -28,7 +28,7 @@ pub trait CodeGenerator {
 obj: Option<(Type, ValueEnum<'ctx>)>,
 fun: (&FunSignature, DefinitionId),
 params: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
-) -> Option<BasicValueEnum<'ctx>>
+) -> Result<Option<BasicValueEnum<'ctx>>, String>
 where
 Self: Sized,
 {
@@ -45,7 +45,7 @@ pub trait CodeGenerator {
 signature: &FunSignature,
 def: &TopLevelDef,
 params: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
-) -> BasicValueEnum<'ctx>
+) -> Result<BasicValueEnum<'ctx>, String>
 where
 Self: Sized,
 {
@@ -65,7 +65,7 @@ pub trait CodeGenerator {
 obj: Option<(Type, ValueEnum<'ctx>)>,
 fun: (&FunSignature, &mut TopLevelDef, String),
 id: usize,
-) -> String {
+) -> Result<String, String> {
 gen_func_instance(ctx, obj, fun, id)
 }
 
@@ -74,7 +74,7 @@ pub trait CodeGenerator {
 &mut self,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 expr: &Expr<Option<Type>>,
-) -> Option<ValueEnum<'ctx>>
+) -> Result<Option<ValueEnum<'ctx>>, String>
 where
 Self: Sized,
 {
@@ -87,7 +87,7 @@ pub trait CodeGenerator {
 &mut self,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 ty: BasicTypeEnum<'ctx>,
-) -> PointerValue<'ctx> {
+) -> Result<PointerValue<'ctx>, String> {
 gen_var(ctx, ty)
 }
 
@@ -96,7 +96,7 @@ pub trait CodeGenerator {
 &mut self,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 pattern: &Expr<Option<Type>>,
-) -> PointerValue<'ctx>
+) -> Result<PointerValue<'ctx>, String>
 where
 Self: Sized,
 {
@@ -109,7 +109,8 @@ pub trait CodeGenerator {
 ctx: &mut CodeGenContext<'ctx, 'a>,
 target: &Expr<Option<Type>>,
 value: ValueEnum<'ctx>,
-) where
+) -> Result<(), String>
+where
 Self: Sized,
 {
 gen_assign(self, ctx, target, value)
@@ -118,44 +119,49 @@ pub trait CodeGenerator {
 /// Generate code for a while expression.
 /// Return true if the while loop must early return
 fn gen_while<'ctx, 'a>(&mut self, ctx: &mut CodeGenContext<'ctx, 'a>, stmt: &Stmt<Option<Type>>)
+-> Result<(), String>
 where
 Self: Sized,
 {
-gen_while(self, ctx, stmt);
+gen_while(self, ctx, stmt)
 }
 
 /// Generate code for a while expression.
 /// Return true if the while loop must early return
 fn gen_for<'ctx, 'a>(&mut self, ctx: &mut CodeGenContext<'ctx, 'a>, stmt: &Stmt<Option<Type>>)
+-> Result<(), String>
 where
 Self: Sized,
 {
-gen_for(self, ctx, stmt);
+gen_for(self, ctx, stmt)
 }
 
 /// Generate code for an if expression.
 /// Return true if the statement must early return
 fn gen_if<'ctx, 'a>(&mut self, ctx: &mut CodeGenContext<'ctx, 'a>, stmt: &Stmt<Option<Type>>)
+-> Result<(), String>
 where
 Self: Sized,
 {
-gen_if(self, ctx, stmt);
+gen_if(self, ctx, stmt)
 }
 
 fn gen_with<'ctx, 'a>(&mut self, ctx: &mut CodeGenContext<'ctx, 'a>, stmt: &Stmt<Option<Type>>)
+-> Result<(), String>
 where
 Self: Sized,
 {
-gen_with(self, ctx, stmt);
+gen_with(self, ctx, stmt)
 }
 
 /// Generate code for a statement
 /// Return true if the statement must early return
 fn gen_stmt<'ctx, 'a>(&mut self, ctx: &mut CodeGenContext<'ctx, 'a>, stmt: &Stmt<Option<Type>>)
+-> Result<(), String>
 where
 Self: Sized,
 {
-gen_stmt(self, ctx, stmt);
+gen_stmt(self, ctx, stmt)
 }
 }
 
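All of the trait's hooks now report failure as `Result<_, String>` rather than panicking in the default bodies. A small illustrative wrapper (not part of the commit) showing how a caller might attach the statement location to whatever error a hook returns:

    // Illustrative wrapper; same scope assumptions as the trait above.
    fn gen_stmt_with_context<'ctx, 'a, G: CodeGenerator>(
        generator: &mut G,
        ctx: &mut CodeGenContext<'ctx, 'a>,
        stmt: &Stmt<Option<Type>>,
    ) -> Result<(), String> {
        generator
            .gen_stmt(ctx, stmt)
            .map_err(|e| format!("{} (while lowering statement at {})", e, stmt.location))
    }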
@@ -125,31 +125,31 @@ pub fn handle_slice_indices<'a, 'ctx, G: CodeGenerator>(
 ctx: &mut CodeGenContext<'ctx, 'a>,
 generator: &mut G,
 list: PointerValue<'ctx>,
-) -> (IntValue<'ctx>, IntValue<'ctx>, IntValue<'ctx>) {
+) -> Result<(IntValue<'ctx>, IntValue<'ctx>, IntValue<'ctx>), String> {
 // TODO: throw exception when step is 0
 let int32 = ctx.ctx.i32_type();
 let zero = int32.const_zero();
 let one = int32.const_int(1, false);
 let length = ctx.build_gep_and_load(list, &[zero, one]).into_int_value();
 let length = ctx.builder.build_int_truncate_or_bit_cast(length, int32, "leni32");
-match (start, end, step) {
+Ok(match (start, end, step) {
 (s, e, None) => (
 s.as_ref().map_or_else(
-|| int32.const_zero(),
+|| Ok(int32.const_zero()),
 |s| handle_slice_index_bound(s, ctx, generator, length),
-),
+)?,
 {
 let e = e.as_ref().map_or_else(
-|| length,
+|| Ok(length),
 |e| handle_slice_index_bound(e, ctx, generator, length),
-);
+)?;
 ctx.builder.build_int_sub(e, one, "final_end")
 },
 one,
 ),
 (s, e, Some(step)) => {
 let step = generator
-.gen_expr(ctx, step)
+.gen_expr(ctx, step)?
 .unwrap()
 .to_basic_value_enum(ctx, generator)
 .into_int_value();
@@ -158,7 +158,7 @@ pub fn handle_slice_indices<'a, 'ctx, G: CodeGenerator>(
 (
 match s {
 Some(s) => {
-let s = handle_slice_index_bound(s, ctx, generator, length);
+let s = handle_slice_index_bound(s, ctx, generator, length)?;
 ctx.builder
 .build_select(
 ctx.builder.build_and(
@@ -181,7 +181,7 @@ pub fn handle_slice_indices<'a, 'ctx, G: CodeGenerator>(
 },
 match e {
 Some(e) => {
-let e = handle_slice_index_bound(e, ctx, generator, length);
+let e = handle_slice_index_bound(e, ctx, generator, length)?;
 ctx.builder
 .build_select(
 neg,
@@ -196,7 +196,7 @@ pub fn handle_slice_indices<'a, 'ctx, G: CodeGenerator>(
 step,
 )
 }
-}
+})
 }
 
 /// this function allows index out of range, since python
@@ -206,7 +206,7 @@ pub fn handle_slice_index_bound<'a, 'ctx, G: CodeGenerator>(
 ctx: &mut CodeGenContext<'ctx, 'a>,
 generator: &mut G,
 length: IntValue<'ctx>,
-) -> IntValue<'ctx> {
+) -> Result<IntValue<'ctx>, String> {
 const SYMBOL: &str = "__nac3_slice_index_bound";
 let func = ctx.module.get_function(SYMBOL).unwrap_or_else(|| {
 let i32_t = ctx.ctx.i32_type();
@@ -214,13 +214,13 @@ pub fn handle_slice_index_bound<'a, 'ctx, G: CodeGenerator>(
 ctx.module.add_function(SYMBOL, fn_t, None)
 });
 
-let i = generator.gen_expr(ctx, i).unwrap().to_basic_value_enum(ctx, generator);
+let i = generator.gen_expr(ctx, i)?.unwrap().to_basic_value_enum(ctx, generator);
-ctx.builder
+Ok(ctx.builder
 .build_call(func, &[i.into(), length.into()], "bounded_ind")
 .try_as_basic_value()
 .left()
 .unwrap()
-.into_int_value()
+.into_int_value())
 }
 
 /// This function handles 'end' **inclusively**.
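`handle_slice_index_bound` evaluates the index through the fallible `gen_expr` and leaves the actual clamping to the `__nac3_slice_index_bound` runtime symbol. A rough model of the clamping that symbol is assumed to perform, following Python slice semantics (an assumption, not code from this commit):

    // Assumed behaviour of __nac3_slice_index_bound: map a possibly negative
    // Python slice bound into [0, length].
    fn slice_index_bound(i: i32, length: i32) -> i32 {
        if i < 0 {
            (i + length).max(0)
        } else {
            i.min(length)
        }
    }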
@@ -206,16 +206,26 @@ impl WorkerRegistry {
 let passes = PassManager::create(&module);
 pass_builder.populate_function_pass_manager(&passes);
 
+let mut errors = Vec::new();
 while let Some(task) = self.receiver.recv().unwrap() {
 let tmp_module = context.create_module("tmp");
-let result = gen_func(&context, generator, self, builder, tmp_module, task);
+match gen_func(&context, generator, self, builder, tmp_module, task) {
+Ok(result) => {
 builder = result.0;
 passes.run_on(&result.2);
 module.link_in_module(result.1).unwrap();
-// module = result.1;
+}
+Err((old_builder, e)) => {
+builder = old_builder;
+errors.push(e);
+}
+}
 *self.task_count.lock() -= 1;
 self.wait_condvar.notify_all();
 }
+if !errors.is_empty() {
+panic!("Codegen error: {}", errors.iter().join("\n----------\n"));
+}
 
 let result = module.verify();
 if let Err(err) = result {
@@ -267,7 +277,6 @@ fn get_llvm_type<'ctx>(
 let ty = if let TopLevelDef::Class { name, fields: fields_list, .. } = &*definition.read()
 {
 let struct_type = ctx.opaque_struct_type(&name.to_string());
-let fields = fields.borrow();
 let fields = fields_list
 .iter()
 .map(|f| get_llvm_type(ctx, generator, unifier, top_level, type_cache, fields[&f.0].0))
@@ -309,7 +318,7 @@ pub fn gen_func<'ctx, G: CodeGenerator>(
 builder: Builder<'ctx>,
 module: Module<'ctx>,
 task: CodeGenTask,
-) -> (Builder<'ctx>, Module<'ctx>, FunctionValue<'ctx>) {
+) -> Result<(Builder<'ctx>, Module<'ctx>, FunctionValue<'ctx>), (Builder<'ctx>, String)> {
 let top_level_ctx = registry.top_level_ctx.clone();
 let static_value_store = registry.static_value_store.clone();
 let (mut unifier, primitives) = {
@@ -478,8 +487,12 @@ pub fn gen_func<'ctx, G: CodeGenerator>(
 static_value_store,
 };
 
+let mut err = None;
 for stmt in task.body.iter() {
-generator.gen_stmt(&mut code_gen_context, stmt);
+if let Err(e) = generator.gen_stmt(&mut code_gen_context, stmt) {
+err = Some(e);
+break;
+}
 if code_gen_context.is_terminated() {
 break;
 }
@@ -490,6 +503,9 @@ pub fn gen_func<'ctx, G: CodeGenerator>(
 }
 
 let CodeGenContext { builder, module, .. } = code_gen_context;
+if let Some(e) = err {
+return Err((builder, e));
+}
 
-(builder, module, fn_val)
+Ok((builder, module, fn_val))
 }
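The worker loop above no longer aborts on the first failing task: the `Builder` is recovered from the `Err` payload so later tasks still compile, error strings are accumulated, and the panic happens once at the end with all of them joined. The same aggregation pattern on plain data, with a `Vec` standing in for the task queue:

    // Collect every per-task error, keep going, report them all at once.
    fn report_all(results: Vec<Result<(), String>>) {
        let errors: Vec<String> = results.into_iter().filter_map(Result::err).collect();
        if !errors.is_empty() {
            panic!("Codegen error: {}", errors.join("\n----------\n"));
        }
    }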
@@ -22,33 +22,33 @@ use std::convert::TryFrom;
 pub fn gen_var<'ctx, 'a>(
 ctx: &mut CodeGenContext<'ctx, 'a>,
 ty: BasicTypeEnum<'ctx>,
-) -> PointerValue<'ctx> {
+) -> Result<PointerValue<'ctx>, String> {
 // put the alloca in init block
 let current = ctx.builder.get_insert_block().unwrap();
 // position before the last branching instruction...
 ctx.builder.position_before(&ctx.init_bb.get_last_instruction().unwrap());
 let ptr = ctx.builder.build_alloca(ty, "tmp");
 ctx.builder.position_at_end(current);
-ptr
+Ok(ptr)
 }
 
 pub fn gen_store_target<'ctx, 'a, G: CodeGenerator>(
 generator: &mut G,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 pattern: &Expr<Option<Type>>,
-) -> PointerValue<'ctx> {
+) -> Result<PointerValue<'ctx>, String> {
 // very similar to gen_expr, but we don't do an extra load at the end
 // and we flatten nested tuples
-match &pattern.node {
+Ok(match &pattern.node {
-ExprKind::Name { id, .. } => ctx.var_assignment.get(id).map(|v| v.0).unwrap_or_else(|| {
+ExprKind::Name { id, .. } => ctx.var_assignment.get(id).map(|v| Ok(v.0) as Result<_, String>).unwrap_or_else(|| {
 let ptr_ty = ctx.get_llvm_type(generator, pattern.custom.unwrap());
-let ptr = generator.gen_var_alloc(ctx, ptr_ty);
+let ptr = generator.gen_var_alloc(ctx, ptr_ty)?;
 ctx.var_assignment.insert(*id, (ptr, None, 0));
-ptr
+Ok(ptr)
-}),
+})?,
 ExprKind::Attribute { value, attr, .. } => {
 let index = ctx.get_attr_index(value.custom.unwrap(), *attr);
-let val = generator.gen_expr(ctx, value).unwrap().to_basic_value_enum(ctx, generator);
+let val = generator.gen_expr(ctx, value)?.unwrap().to_basic_value_enum(ctx, generator);
 let ptr = if let BasicValueEnum::PointerValue(v) = val {
 v
 } else {
@@ -68,12 +68,12 @@ pub fn gen_store_target<'ctx, 'a, G: CodeGenerator>(
 ExprKind::Subscript { value, slice, .. } => {
 let i32_type = ctx.ctx.i32_type();
 let v = generator
-.gen_expr(ctx, value)
+.gen_expr(ctx, value)?
 .unwrap()
 .to_basic_value_enum(ctx, generator)
 .into_pointer_value();
 let index = generator
-.gen_expr(ctx, slice)
+.gen_expr(ctx, slice)?
 .unwrap()
 .to_basic_value_enum(ctx, generator)
 .into_int_value();
@@ -85,7 +85,7 @@ pub fn gen_store_target<'ctx, 'a, G: CodeGenerator>(
 }
 }
 _ => unreachable!(),
-}
+})
 }
 
 pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
@@ -93,8 +93,8 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
 ctx: &mut CodeGenContext<'ctx, 'a>,
 target: &Expr<Option<Type>>,
 value: ValueEnum<'ctx>,
-) {
+) -> Result<(), String> {
-match &target.node {
+Ok(match &target.node {
 ExprKind::Tuple { elts, .. } => {
 if let BasicValueEnum::StructValue(v) = value.to_basic_value_enum(ctx, generator) {
 for (i, elt) in elts.iter().enumerate() {
@@ -102,7 +102,7 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
 .builder
 .build_extract_value(v, u32::try_from(i).unwrap(), "struct_elem")
 .unwrap();
-generator.gen_assign(ctx, elt, v.into());
+generator.gen_assign(ctx, elt, v.into())?;
 }
 } else {
 unreachable!()
@@ -113,12 +113,12 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
 {
 if let ExprKind::Slice { lower, upper, step } = &slice.node {
 let ls = generator
-.gen_expr(ctx, ls)
+.gen_expr(ctx, ls)?
 .unwrap()
 .to_basic_value_enum(ctx, generator)
 .into_pointer_value();
 let (start, end, step) =
-handle_slice_indices(lower, upper, step, ctx, generator, ls);
+handle_slice_indices(lower, upper, step, ctx, generator, ls)?;
 let value = value.to_basic_value_enum(ctx, generator).into_pointer_value();
 let ty = if let TypeEnum::TList { ty } =
 &*ctx.unifier.get_ty(target.custom.unwrap())
@@ -127,7 +127,7 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
 } else {
 unreachable!()
 };
-let src_ind = handle_slice_indices(&None, &None, &None, ctx, generator, value);
+let src_ind = handle_slice_indices(&None, &None, &None, ctx, generator, value)?;
 list_slice_assignment(
 ctx,
 generator.get_size_type(ctx.ctx),
@@ -142,7 +142,7 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
 }
 }
 _ => {
-let ptr = generator.gen_store_target(ctx, target);
+let ptr = generator.gen_store_target(ctx, target)?;
 if let ExprKind::Name { id, .. } = &target.node {
 let (_, static_value, counter) = ctx.var_assignment.get_mut(id).unwrap();
 *counter += 1;
@@ -153,14 +153,14 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
 let val = value.to_basic_value_enum(ctx, generator);
 ctx.builder.build_store(ptr, val);
 }
-}
+})
 }
 
 pub fn gen_for<'ctx, 'a, G: CodeGenerator>(
 generator: &mut G,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 stmt: &Stmt<Option<Type>>,
-) {
+) -> Result<(), String> {
 if let StmtKind::For { iter, target, body, orelse, .. } = &stmt.node {
 // var_assignment static values may be changed in another branch
 // if so, remove the static value as it may not be correct in this branch
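The `Name` arm of `gen_store_target` wraps the map lookup in `Ok` so the allocate-on-miss closure can itself use `?`, and the whole expression is unwrapped once with a trailing `?`. The idiom in isolation, on toy types rather than the real `var_assignment` map:

    use std::collections::HashMap;

    // Reuse the recorded slot or allocate a new one; both branches yield a
    // Result so a fallible allocator can be called inside the closure.
    fn slot_for(
        vars: &mut HashMap<String, u32>,
        name: &str,
        mut alloc: impl FnMut() -> Result<u32, String>,
    ) -> Result<u32, String> {
        vars.get(name).map(|v| Ok(*v)).unwrap_or_else(|| {
            let slot = alloc()?;
            vars.insert(name.to_string(), slot);
            Ok(slot)
        })
    }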
@@ -179,11 +179,11 @@ pub fn gen_for<'ctx, 'a, G: CodeGenerator>(
 // store loop bb information and restore it later
 let loop_bb = ctx.loop_target.replace((test_bb, cont_bb));
 
-let iter_val = generator.gen_expr(ctx, iter).unwrap().to_basic_value_enum(ctx, generator);
+let iter_val = generator.gen_expr(ctx, iter)?.unwrap().to_basic_value_enum(ctx, generator);
 if ctx.unifier.unioned(iter.custom.unwrap(), ctx.primitives.range) {
 // setup
 let iter_val = iter_val.into_pointer_value();
-let i = generator.gen_store_target(ctx, target);
+let i = generator.gen_store_target(ctx, target)?;
 let (start, end, step) = destructure_range(ctx, iter_val);
 ctx.builder.build_store(i, ctx.builder.build_int_sub(start, step, "start_init"));
 ctx.builder.build_unconditional_branch(test_bb);
@@ -214,7 +214,7 @@ pub fn gen_for<'ctx, 'a, G: CodeGenerator>(
 );
 ctx.builder.position_at_end(body_bb);
 } else {
-let counter = generator.gen_var_alloc(ctx, size_t.into());
+let counter = generator.gen_var_alloc(ctx, size_t.into())?;
 // counter = -1
 ctx.builder.build_store(counter, size_t.const_int(u64::max_value(), true));
 let len = ctx
@@ -235,10 +235,10 @@ pub fn gen_for<'ctx, 'a, G: CodeGenerator>(
 .build_gep_and_load(iter_val.into_pointer_value(), &[zero, zero])
 .into_pointer_value();
 let val = ctx.build_gep_and_load(arr_ptr, &[tmp]);
-generator.gen_assign(ctx, target, val.into());
+generator.gen_assign(ctx, target, val.into())?;
 }
 
-gen_block(generator, ctx, body.iter());
+gen_block(generator, ctx, body.iter())?;
 for (k, (_, _, counter)) in var_assignment.iter() {
 let (_, static_val, counter2) = ctx.var_assignment.get_mut(k).unwrap();
 if counter != counter2 {
@@ -250,7 +250,7 @@ pub fn gen_for<'ctx, 'a, G: CodeGenerator>(
 }
 if !orelse.is_empty() {
 ctx.builder.position_at_end(orelse_bb);
-gen_block(generator, ctx, orelse.iter());
+gen_block(generator, ctx, orelse.iter())?;
 if !ctx.is_terminated() {
 ctx.builder.build_unconditional_branch(cont_bb);
 }
@@ -266,13 +266,14 @@ pub fn gen_for<'ctx, 'a, G: CodeGenerator>(
 } else {
 unreachable!()
 }
+Ok(())
 }
 
 pub fn gen_while<'ctx, 'a, G: CodeGenerator>(
 generator: &mut G,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 stmt: &Stmt<Option<Type>>,
-) {
+) -> Result<(), String> {
 if let StmtKind::While { test, body, orelse, .. } = &stmt.node {
 // var_assignment static values may be changed in another branch
 // if so, remove the static value as it may not be correct in this branch
@@ -289,14 +290,14 @@ pub fn gen_while<'ctx, 'a, G: CodeGenerator>(
 let loop_bb = ctx.loop_target.replace((test_bb, cont_bb));
 ctx.builder.build_unconditional_branch(test_bb);
 ctx.builder.position_at_end(test_bb);
-let test = generator.gen_expr(ctx, test).unwrap().to_basic_value_enum(ctx, generator);
+let test = generator.gen_expr(ctx, test)?.unwrap().to_basic_value_enum(ctx, generator);
 if let BasicValueEnum::IntValue(test) = test {
 ctx.builder.build_conditional_branch(test, body_bb, orelse_bb);
 } else {
 unreachable!()
 };
 ctx.builder.position_at_end(body_bb);
-gen_block(generator, ctx, body.iter());
+gen_block(generator, ctx, body.iter())?;
 for (k, (_, _, counter)) in var_assignment.iter() {
 let (_, static_val, counter2) = ctx.var_assignment.get_mut(k).unwrap();
 if counter != counter2 {
@@ -308,7 +309,7 @@ pub fn gen_while<'ctx, 'a, G: CodeGenerator>(
 }
 if !orelse.is_empty() {
 ctx.builder.position_at_end(orelse_bb);
-gen_block(generator, ctx, orelse.iter());
+gen_block(generator, ctx, orelse.iter())?;
 if !ctx.is_terminated() {
 ctx.builder.build_unconditional_branch(cont_bb);
 }
@@ -324,13 +325,14 @@ pub fn gen_while<'ctx, 'a, G: CodeGenerator>(
 } else {
 unreachable!()
 }
+Ok(())
 }
 
 pub fn gen_if<'ctx, 'a, G: CodeGenerator>(
 generator: &mut G,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 stmt: &Stmt<Option<Type>>,
-) {
+) -> Result<(), String> {
 if let StmtKind::If { test, body, orelse, .. } = &stmt.node {
 // var_assignment static values may be changed in another branch
 // if so, remove the static value as it may not be correct in this branch
@@ -349,14 +351,14 @@ pub fn gen_if<'ctx, 'a, G: CodeGenerator>(
 };
 ctx.builder.build_unconditional_branch(test_bb);
 ctx.builder.position_at_end(test_bb);
-let test = generator.gen_expr(ctx, test).unwrap().to_basic_value_enum(ctx, generator);
+let test = generator.gen_expr(ctx, test)?.unwrap().to_basic_value_enum(ctx, generator);
 if let BasicValueEnum::IntValue(test) = test {
 ctx.builder.build_conditional_branch(test, body_bb, orelse_bb);
 } else {
 unreachable!()
 };
 ctx.builder.position_at_end(body_bb);
-gen_block(generator, ctx, body.iter());
+gen_block(generator, ctx, body.iter())?;
 for (k, (_, _, counter)) in var_assignment.iter() {
 let (_, static_val, counter2) = ctx.var_assignment.get_mut(k).unwrap();
 if counter != counter2 {
@@ -372,7 +374,7 @@ pub fn gen_if<'ctx, 'a, G: CodeGenerator>(
 }
 if !orelse.is_empty() {
 ctx.builder.position_at_end(orelse_bb);
-gen_block(generator, ctx, orelse.iter());
+gen_block(generator, ctx, orelse.iter())?;
 if !ctx.is_terminated() {
 if cont_bb.is_none() {
 cont_bb = Some(ctx.ctx.append_basic_block(current, "cont"));
@@ -392,6 +394,7 @@ pub fn gen_if<'ctx, 'a, G: CodeGenerator>(
 } else {
 unreachable!()
 }
+Ok(())
 }
 
 pub fn final_proxy<'ctx, 'a>(
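`gen_for`, `gen_while` and `gen_if` all snapshot `var_assignment`, lower the branch body, and then drop the recorded static value of any variable whose assignment counter moved, since a value established before the branch need not hold afterwards. A toy model of that invalidation step (the map layout is simplified):

    use std::collections::HashMap;

    // (assignment counter, optional compile-time-known value) per variable.
    type Vars = HashMap<String, (u32, Option<i64>)>;

    fn invalidate_changed(before: &Vars, after: &mut Vars) {
        for (name, (old_counter, _)) in before {
            if let Some((counter, static_val)) = after.get_mut(name) {
                if *counter != *old_counter {
                    *static_val = None; // reassigned inside the branch: forget it
                }
            }
        }
    }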
@@ -442,7 +445,7 @@ pub fn exn_constructor<'ctx, 'a>(
 _fun: (&FunSignature, DefinitionId),
 mut args: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
 generator: &mut dyn CodeGenerator
-) -> Option<BasicValueEnum<'ctx>> {
+) -> Result<Option<BasicValueEnum<'ctx>>, String> {
 let (zelf_ty, zelf) = obj.unwrap();
 let zelf = zelf.to_basic_value_enum(ctx, generator).into_pointer_value();
 let int32 = ctx.ctx.i32_type();
@@ -498,7 +501,7 @@ pub fn exn_constructor<'ctx, 'a>(
 ctx.builder.build_store(ptr, zero);
 }
 }
-Some(zelf.into())
+Ok(Some(zelf.into()))
 }
 
 pub fn gen_raise<'ctx, 'a, G: CodeGenerator>(
@@ -540,7 +543,7 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
 generator: &mut G,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 target: &Stmt<Option<Type>>,
-) {
+) -> Result<(), String> {
 if let StmtKind::Try { body, handlers, orelse, finalbody, .. } = &target.node {
 // if we need to generate anything related to exception, we must have personality defined
 let personality_symbol = ctx.top_level.personality_symbol.as_ref().unwrap();
@@ -564,7 +567,7 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
 let mut old_return = None;
 let mut old_outer_final = None;
 let has_cleanup = if !finalbody.is_empty() {
-let final_state = generator.gen_var_alloc(ctx, ptr_type.into());
+let final_state = generator.gen_var_alloc(ctx, ptr_type.into())?;
 old_outer_final = ctx.outer_final.replace((final_state, Vec::new(), Vec::new()));
 if let Some((continue_target, break_target)) = ctx.loop_target {
 let break_proxy = ctx.ctx.append_basic_block(current_fun, "try.break");
@@ -622,9 +625,9 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
 }
 let old_clauses = ctx.outer_catch_clauses.replace((all_clauses, dispatcher, exn));
 let old_unwind = ctx.unwind_target.replace(landingpad);
-gen_block(generator, ctx, body.iter());
+gen_block(generator, ctx, body.iter())?;
 if ctx.builder.get_insert_block().unwrap().get_terminator().is_none() {
-gen_block(generator, ctx, orelse.iter());
+gen_block(generator, ctx, orelse.iter())?;
 }
 let body = ctx.builder.get_insert_block().unwrap();
 // reset old_clauses and old_unwind
@@ -723,11 +726,11 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
 ctx.builder.position_at_end(handler_bb);
 if let Some(name) = name {
 let exn_ty = ctx.get_llvm_type(generator, type_.as_ref().unwrap().custom.unwrap());
-let exn_store = generator.gen_var_alloc(ctx, exn_ty);
+let exn_store = generator.gen_var_alloc(ctx, exn_ty)?;
 ctx.var_assignment.insert(*name, (exn_store, None, 0));
 ctx.builder.build_store(exn_store, exn.as_basic_value());
 }
-gen_block(generator, ctx, body.iter());
+gen_block(generator, ctx, body.iter())?;
 let current = ctx.builder.get_insert_block().unwrap();
 // only need to call end catch if not terminated
 // otherwise, we already handled in return/break/continue/raise
@@ -813,7 +816,7 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
 // exception path
 let cleanup = cleanup.unwrap();
 ctx.builder.position_at_end(cleanup);
-gen_block(generator, ctx, finalbody.iter());
+gen_block(generator, ctx, finalbody.iter())?;
 if !ctx.is_terminated() {
 ctx.build_call_or_invoke(resume, &[], "resume");
 ctx.builder.build_unreachable();
@@ -825,7 +828,7 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
 final_targets.push(tail);
 let finalizer = ctx.ctx.append_basic_block(current_fun, "try.finally");
 ctx.builder.position_at_end(finalizer);
-gen_block(generator, ctx, finalbody.iter());
+gen_block(generator, ctx, finalbody.iter())?;
 if !ctx.is_terminated() {
 let dest = ctx.builder.build_load(final_state, "final_dest");
 ctx.builder.build_indirect_branch(dest, &final_targets);
@@ -847,6 +850,7 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
 }
 ctx.builder.position_at_end(tail);
 }
+Ok(())
 } else {
 unreachable!()
 }
@@ -855,20 +859,21 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
 pub fn gen_with<'ctx, 'a, G: CodeGenerator>(
 _: &mut G,
 _: &mut CodeGenContext<'ctx, 'a>,
-_: &Stmt<Option<Type>>,
+stmt: &Stmt<Option<Type>>,
-) -> bool {
+) -> Result<(), String> {
 // TODO: Implement with statement after finishing exceptions
-unimplemented!()
+Err(format!("With statement with custom types is not yet supported (at {})", stmt.location))
 }
 
 pub fn gen_return<'ctx, 'a, G: CodeGenerator>(
 generator: &mut G,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 value: &Option<Box<Expr<Option<Type>>>>,
-) {
+) -> Result<(), String> {
 let value = value
 .as_ref()
-.map(|v| generator.gen_expr(ctx, v).unwrap().to_basic_value_enum(ctx, generator));
+.map(|v| generator.gen_expr(ctx, v).map(|v| v.unwrap().to_basic_value_enum(ctx, generator)))
+.transpose()?;
 if let Some(return_target) = ctx.return_target {
 if let Some(value) = value {
 ctx.builder.build_store(ctx.return_buffer.unwrap(), value);
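`gen_return` uses `Option::transpose` to turn the `Option<Result<..>>` produced by mapping a fallible closure over the optional return expression into a `Result<Option<..>>` that `?` can unwrap. The same flip on plain data:

    // Option<Result<T, E>> -> Result<Option<T>, E>, ready for `?`.
    fn parse_optional(text: Option<&str>) -> Result<Option<i32>, String> {
        text.map(|s| s.parse::<i32>().map_err(|e| e.to_string())).transpose()
    }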
@@ -878,31 +883,32 @@ pub fn gen_return<'ctx, 'a, G: CodeGenerator>(
 let value = value.as_ref().map(|v| v as &dyn BasicValue);
 ctx.builder.build_return(value);
 }
+Ok(())
 }
 
 pub fn gen_stmt<'ctx, 'a, G: CodeGenerator>(
 generator: &mut G,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 stmt: &Stmt<Option<Type>>,
-) {
+) -> Result<(), String> {
 match &stmt.node {
 StmtKind::Pass { .. } => {}
 StmtKind::Expr { value, .. } => {
-generator.gen_expr(ctx, value);
+generator.gen_expr(ctx, value)?;
 }
 StmtKind::Return { value, .. } => {
-gen_return(generator, ctx, value);
+gen_return(generator, ctx, value)?;
 }
 StmtKind::AnnAssign { target, value, .. } => {
 if let Some(value) = value {
-let value = generator.gen_expr(ctx, value).unwrap();
+let value = generator.gen_expr(ctx, value)?.unwrap();
-generator.gen_assign(ctx, target, value);
+generator.gen_assign(ctx, target, value)?;
 }
 }
 StmtKind::Assign { targets, value, .. } => {
-let value = generator.gen_expr(ctx, value).unwrap();
+let value = generator.gen_expr(ctx, value)?.unwrap();
 for target in targets.iter() {
-generator.gen_assign(ctx, target, value.clone());
+generator.gen_assign(ctx, target, value.clone())?;
 }
 }
 StmtKind::Continue { .. } => {
@@ -911,32 +917,38 @@ pub fn gen_stmt<'ctx, 'a, G: CodeGenerator>(
 StmtKind::Break { .. } => {
 ctx.builder.build_unconditional_branch(ctx.loop_target.unwrap().1);
 }
-StmtKind::If { .. } => generator.gen_if(ctx, stmt),
+StmtKind::If { .. } => generator.gen_if(ctx, stmt)?,
-StmtKind::While { .. } => generator.gen_while(ctx, stmt),
+StmtKind::While { .. } => generator.gen_while(ctx, stmt)?,
-StmtKind::For { .. } => generator.gen_for(ctx, stmt),
+StmtKind::For { .. } => generator.gen_for(ctx, stmt)?,
-StmtKind::With { .. } => generator.gen_with(ctx, stmt),
+StmtKind::With { .. } => generator.gen_with(ctx, stmt)?,
 StmtKind::AugAssign { target, op, value, .. } => {
-let value = gen_binop_expr(generator, ctx, target, op, value);
+let value = gen_binop_expr(generator, ctx, target, op, value)?;
-generator.gen_assign(ctx, target, value);
+generator.gen_assign(ctx, target, value)?;
 }
-StmtKind::Try { .. } => gen_try(generator, ctx, stmt),
+StmtKind::Try { .. } => gen_try(generator, ctx, stmt)?,
 StmtKind::Raise { exc, .. } => {
-let exc = exc.as_ref().map(|exc| generator.gen_expr(ctx, exc).unwrap().to_basic_value_enum(ctx, generator));
+if let Some(exc) = exc {
-gen_raise(generator, ctx, exc.as_ref(), stmt.location)
+let exc = generator.gen_expr(ctx, exc)?.unwrap().to_basic_value_enum(ctx, generator);
+gen_raise(generator, ctx, Some(&exc), stmt.location);
+} else {
+gen_raise(generator, ctx, None, stmt.location);
+}
 }
 _ => unimplemented!(),
 };
+Ok(())
 }
 
 pub fn gen_block<'ctx, 'a, 'b, G: CodeGenerator, I: Iterator<Item = &'b Stmt<Option<Type>>>>(
 generator: &mut G,
 ctx: &mut CodeGenContext<'ctx, 'a>,
 stmts: I,
-) {
+) -> Result<(), String> {
 for stmt in stmts {
-generator.gen_stmt(ctx, stmt);
+generator.gen_stmt(ctx, stmt)?;
 if ctx.is_terminated() {
 break;
 }
 }
+Ok(())
 }
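With `gen_stmt` and `gen_block` returning `Result`, unsupported constructs are reported as plain strings carrying the source location, in the same style as the `with`-statement message above. A minimal sketch of producing such a diagnostic (the helper name is invented):

    use nac3parser::ast::Location;

    // Format an "unsupported" diagnostic the way the refactored code does.
    fn unsupported(what: &str, loc: Location) -> Result<(), String> {
        Err(format!("{} is not yet supported (at {})", what, loc))
    }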
@@ -18,7 +18,6 @@ use nac3parser::{
 parser::parse_program,
 };
 use parking_lot::RwLock;
-use std::cell::RefCell;
 use std::collections::{HashMap, HashSet};
 use std::sync::Arc;
 
@@ -57,8 +56,8 @@ impl SymbolResolver for Resolver {
 unimplemented!()
 }
 
-fn get_identifier_def(&self, id: StrRef) -> Option<DefinitionId> {
+fn get_identifier_def(&self, id: StrRef) -> Result<DefinitionId, String> {
-self.id_to_def.read().get(&id).cloned()
+self.id_to_def.read().get(&id).cloned().ok_or_else(|| format!("cannot find symbol `{}`", id))
 }
 
 fn get_string_id(&self, _: &str) -> i32 {
@@ -211,7 +210,7 @@ fn test_simple_call() {
 ret: primitives.int32,
 vars: HashMap::new(),
 };
-let fun_ty = unifier.add_ty(TypeEnum::TFunc(RefCell::new(signature.clone())));
+let fun_ty = unifier.add_ty(TypeEnum::TFunc(signature.clone()));
 let mut store = ConcreteTypeStore::new();
 let mut cache = HashMap::new();
 let signature = store.from_signature(&mut unifier, &primitives, &signature, &mut cache);
@@ -227,6 +226,7 @@ fn test_simple_call() {
 instance_to_symbol: HashMap::new(),
 resolver: None,
 codegen_callback: None,
+loc: None,
 })));
 
 let resolver = Resolver {
@@ -1,6 +1,6 @@
-use std::collections::HashMap;
+use std::{collections::HashMap, fmt::Display};
 use std::fmt::Debug;
-use std::{cell::RefCell, sync::Arc};
+use std::sync::Arc;
 
 use crate::{
 codegen::CodeGenContext,
@@ -16,7 +16,7 @@ use crate::{
 use crate::typecheck::typedef::TypeEnum;
 use inkwell::values::{BasicValueEnum, FloatValue, IntValue, PointerValue};
 use itertools::{chain, izip};
-use nac3parser::ast::{Expr, StrRef};
+use nac3parser::ast::{Expr, Location, StrRef};
 use parking_lot::RwLock;
 
 #[derive(Clone, PartialEq, Debug)]
@@ -29,6 +29,25 @@ pub enum SymbolValue {
 Tuple(Vec<SymbolValue>),
 }
 
+impl Display for SymbolValue {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+match self {
+SymbolValue::I32(i) => write!(f, "{}", i),
+SymbolValue::I64(i) => write!(f, "int64({})", i),
+SymbolValue::Str(s) => write!(f, "\"{}\"", s),
+SymbolValue::Double(d) => write!(f, "{}", d),
+SymbolValue::Bool(b) => if *b {
+write!(f, "True")
+} else {
+write!(f, "False")
+},
+SymbolValue::Tuple(t) => {
+write!(f, "({})", t.iter().map(|v| format!("{}", v)).collect::<Vec<_>>().join(", "))
+}
+}
+}
+}
+
 pub trait StaticValue {
 fn get_unique_identifier(&self) -> u64;
 
@@ -105,7 +124,7 @@ pub trait SymbolResolver {
 ) -> Result<Type, String>;
 
 // get the top-level definition of identifiers
-fn get_identifier_def(&self, str: StrRef) -> Option<DefinitionId>;
+fn get_identifier_def(&self, str: StrRef) -> Result<DefinitionId, String>;
 
 fn get_symbol_value<'ctx, 'a>(
 &self,
@@ -154,7 +173,7 @@ pub fn parse_type_annotation<T>(
 let str_id = ids[8];
 let exn_id = ids[9];
 
-let name_handling = |id: &StrRef, unifier: &mut Unifier| {
+let name_handling = |id: &StrRef, loc: Location, unifier: &mut Unifier| {
 if *id == int32_id {
 Ok(primitives.int32)
 } else if *id == int64_id {
@@ -171,7 +190,8 @@ pub fn parse_type_annotation<T>(
 Ok(primitives.exception)
 } else {
 let obj_id = resolver.get_identifier_def(*id);
-if let Some(obj_id) = obj_id {
+match obj_id {
+Ok(obj_id) => {
 let def = top_level_defs[obj_id.0].read();
 if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
 if !type_vars.is_empty() {
@@ -180,28 +200,27 @@ pub fn parse_type_annotation<T>(
 type_vars.len()
 ));
 }
-let fields = RefCell::new(
+let fields = chain(
-chain(
 fields.iter().map(|(k, v, m)| (*k, (*v, *m))),
 methods.iter().map(|(k, v, _)| (*k, (*v, false))),
-)
+).collect();
-.collect(),
-);
 Ok(unifier.add_ty(TypeEnum::TObj {
 obj_id,
 fields,
 params: Default::default(),
 }))
 } else {
-Err("Cannot use function name as type".into())
+Err(format!("Cannot use function name as type at {}", loc))
 }
-} else {
+}
-// it could be a type variable
+Err(e) => {
-let ty = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id)?;
+let ty = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id)
+.map_err(|_| format!("Unknown type annotation at {}: {}", loc, e))?;
 if let TypeEnum::TVar { .. } = &*unifier.get_ty(ty) {
 Ok(ty)
 } else {
-Err(format!("Unknown type annotation {}", id))
+Err(format!("Unknown type annotation {} at {}", id, loc))
+}
 }
 }
 }
@@ -238,8 +257,7 @@ pub fn parse_type_annotation<T>(
 };
 
 let obj_id = resolver
-.get_identifier_def(*id)
+.get_identifier_def(*id)?;
-.ok_or_else(|| format!("Unknown type annotation {}", id))?;
 let def = top_level_defs[obj_id.0].read();
 if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
 if types.len() != type_vars.len() {
@@ -271,8 +289,8 @@ pub fn parse_type_annotation<T>(
 }));
 Ok(unifier.add_ty(TypeEnum::TObj {
 obj_id,
-fields: fields.into(),
+fields,
-params: subst.into(),
+params: subst,
 }))
 } else {
 Err("Cannot use function name as type".into())
@@ -281,7 +299,7 @@ pub fn parse_type_annotation<T>(
 };
 
 match &expr.node {
-Name { id, .. } => name_handling(id, unifier),
+Name { id, .. } => name_handling(id, expr.location, unifier),
 Subscript { value, slice, .. } => {
 if let Name { id, .. } = &value.node {
 subscript_name_handle(id, slice, unifier)
@@ -310,7 +328,7 @@ impl dyn SymbolResolver + Send + Sync {
 unifier: &mut Unifier,
 ty: Type,
 ) -> String {
-unifier.stringify(
+unifier.internal_stringify(
 ty,
 &mut |id| {
 if let TopLevelDef::Class { name, .. } = &*top_level_defs[id].read() {
@@ -320,6 +338,7 @@ impl dyn SymbolResolver + Send + Sync {
 }
 },
 &mut |id| format!("var{}", id),
+&mut None
 )
 }
 }
||||||
@@ -4,7 +4,6 @@ use crate::{
     symbol_resolver::SymbolValue,
 };
 use inkwell::{FloatPredicate, IntPredicate};
-use std::cell::RefCell;
 
 type BuiltinInfo = (
     Vec<(Arc<RwLock<TopLevelDef>>, Option<Stmt>)>,
@@ -18,7 +17,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
     let boolean = primitives.0.bool;
     let range = primitives.0.range;
     let string = primitives.0.str;
-    let num_ty = primitives.1.get_fresh_var_with_range(&[int32, int64, float, boolean]);
+    let num_ty = primitives.1.get_fresh_var_with_range(&[int32, int64, float, boolean], Some("N".into()), None);
     let var_map: HashMap<_, _> = vec![(num_ty.1, num_ty.0)].into_iter().collect();
 
     let exception_fields = vec![
@@ -34,12 +33,12 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
     ];
     let div_by_zero = primitives.1.add_ty(TypeEnum::TObj {
         obj_id: DefinitionId(10),
-        fields: RefCell::new(exception_fields.iter().map(|(a, b, c)| (*a, (*b, *c))).collect()),
+        fields: exception_fields.iter().map(|(a, b, c)| (*a, (*b, *c))).collect(),
         params: Default::default()
     });
     let index_error = primitives.1.add_ty(TypeEnum::TObj {
         obj_id: DefinitionId(11),
-        fields: RefCell::new(exception_fields.iter().map(|(a, b, c)| (*a, (*b, *c))).collect()),
+        fields: exception_fields.iter().map(|(a, b, c)| (*a, (*b, *c))).collect(),
         params: Default::default()
     });
     let exn_cons_args = vec![
@@ -52,44 +51,48 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
         FuncArg { name: "param2".into(), ty: int64,
             default_value: Some(SymbolValue::I64(0))},
     ];
-    let div_by_zero_signature = primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+    let div_by_zero_signature = primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
         args: exn_cons_args.clone(),
         ret: div_by_zero,
         vars: Default::default()
-    })));
+    }));
-    let index_error_signature = primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+    let index_error_signature = primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
        args: exn_cons_args,
        ret: index_error,
        vars: Default::default()
-    })));
+    }));
    let top_level_def_list = vec![
        Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(
            0,
            None,
            "int32".into(),
            None,
+           None,
        ))),
        Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(
            1,
            None,
            "int64".into(),
            None,
+           None,
        ))),
        Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(
            2,
            None,
            "float".into(),
            None,
+           None,
        ))),
-       Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(3, None, "bool".into(), None))),
+       Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(3, None, "bool".into(), None, None))),
-       Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(4, None, "none".into(), None))),
+       Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(4, None, "none".into(), None, None))),
        Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(
            5,
            None,
            "range".into(),
            None,
+           None,
        ))),
-       Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(6, None, "str".into(), None))),
+       Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(6, None, "str".into(), None, None))),
        Arc::new(RwLock::new(TopLevelDef::Class {
            name: "Exception".into(),
            object_id: DefinitionId(7),
@@ -99,6 +102,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
            ancestors: vec![],
            constructor: None,
            resolver: None,
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "ZeroDivisionError.__init__".into(),
@@ -108,7 +112,8 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
-           codegen_callback: Some(Arc::new(GenCall::new(Box::new(exn_constructor))))
+           codegen_callback: Some(Arc::new(GenCall::new(Box::new(exn_constructor)))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "IndexError.__init__".into(),
@@ -118,7 +123,8 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
-           codegen_callback: Some(Arc::new(GenCall::new(Box::new(exn_constructor))))
+           codegen_callback: Some(Arc::new(GenCall::new(Box::new(exn_constructor)))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Class {
            name: "ZeroDivisionError".into(),
@@ -132,6 +138,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
            ],
            constructor: Some(div_by_zero_signature),
            resolver: None,
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Class {
            name: "IndexError".into(),
@@ -145,15 +152,16 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
            ],
            constructor: Some(index_error_signature),
            resolver: None,
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "int32".into(),
            simple_name: "int32".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: num_ty.0, default_value: None }],
                ret: int32,
                vars: var_map.clone(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -166,7 +174,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                let boolean = ctx.primitives.bool;
                let arg_ty = fun.0.args[0].ty;
                let arg = args[0].1.clone().to_basic_value_enum(ctx, generator);
-               if ctx.unifier.unioned(arg_ty, boolean) {
+               Ok(if ctx.unifier.unioned(arg_ty, boolean) {
                    Some(
                        ctx.builder
                            .build_int_z_extend(
@@ -200,18 +208,19 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    Some(val)
                } else {
                    unreachable!()
-               }
+               })
            },
            )))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "int64".into(),
            simple_name: "int64".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: num_ty.0, default_value: None }],
                ret: int64,
                vars: var_map.clone(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -224,7 +233,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                let boolean = ctx.primitives.bool;
                let arg_ty = fun.0.args[0].ty;
                let arg = args[0].1.clone().to_basic_value_enum(ctx, generator);
-               if ctx.unifier.unioned(arg_ty, boolean)
+               Ok(if ctx.unifier.unioned(arg_ty, boolean)
                    || ctx.unifier.unioned(arg_ty, int32)
                {
                    Some(
@@ -250,18 +259,19 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    Some(val)
                } else {
                    unreachable!()
-               }
+               })
            },
            )))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "float".into(),
            simple_name: "float".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: num_ty.0, default_value: None }],
                ret: float,
                vars: var_map.clone(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -274,7 +284,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                let float = ctx.primitives.float;
                let arg_ty = fun.0.args[0].ty;
                let arg = args[0].1.clone().to_basic_value_enum(ctx, generator);
-               if ctx.unifier.unioned(arg_ty, boolean)
+               Ok(if ctx.unifier.unioned(arg_ty, boolean)
                    || ctx.unifier.unioned(arg_ty, int32)
                    || ctx.unifier.unioned(arg_ty, int64)
                {
@@ -288,18 +298,19 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    Some(arg)
                } else {
                    unreachable!()
-               }
+               })
            },
            )))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "round".into(),
            simple_name: "round".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: float, default_value: None }],
                ret: int32,
                vars: Default::default(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -318,7 +329,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    .try_as_basic_value()
                    .left()
                    .unwrap();
-               Some(
+               Ok(Some(
                    ctx.builder
                        .build_float_to_signed_int(
                            val.into_float_value(),
@@ -326,17 +337,18 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                            "fptosi",
                        )
                        .into(),
-               )
+               ))
            })))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "round64".into(),
            simple_name: "round64".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: float, default_value: None }],
                ret: int64,
                vars: Default::default(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -355,7 +367,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    .try_as_basic_value()
                    .left()
                    .unwrap();
-               Some(
+               Ok(Some(
                    ctx.builder
                        .build_float_to_signed_int(
                            val.into_float_value(),
@@ -363,13 +375,14 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                            "fptosi",
                        )
                        .into(),
-               )
+               ))
            })))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "range".into(),
            simple_name: "range".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
               args: vec![
                   FuncArg { name: "start".into(), ty: int32, default_value: None },
                   FuncArg {
@@ -386,7 +399,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
               ],
               ret: range,
               vars: Default::default(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -438,33 +451,35 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    ctx.builder.build_store(b, stop);
                    ctx.builder.build_store(c, step);
                }
-               Some(ptr.into())
+               Ok(Some(ptr.into()))
            })))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "str".into(),
            simple_name: "str".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: string, default_value: None }],
+               args: vec![FuncArg { name: "s".into(), ty: string, default_value: None }],
                ret: string,
                vars: Default::default(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
            codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args, generator| {
-               Some(args[0].1.clone().to_basic_value_enum(ctx, generator))
+               Ok(Some(args[0].1.clone().to_basic_value_enum(ctx, generator)))
            })))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "bool".into(),
            simple_name: "bool".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: num_ty.0, default_value: None }],
                ret: primitives.0.bool,
                vars: var_map,
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -477,7 +492,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                let boolean = ctx.primitives.bool;
                let arg_ty = fun.0.args[0].ty;
                let arg = args[0].1.clone().to_basic_value_enum(ctx, generator);
-               if ctx.unifier.unioned(arg_ty, boolean) {
+               Ok(if ctx.unifier.unioned(arg_ty, boolean) {
                    Some(arg)
                } else if ctx.unifier.unioned(arg_ty, int32) {
                    Some(ctx.builder.build_int_compare(
@@ -505,18 +520,19 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    Some(val)
                } else {
                    unreachable!()
-               }
+               })
            },
            )))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "floor".into(),
            simple_name: "floor".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: float, default_value: None }],
                ret: int32,
                vars: Default::default(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -535,7 +551,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    .try_as_basic_value()
                    .left()
                    .unwrap();
-               Some(
+               Ok(Some(
                    ctx.builder
                        .build_float_to_signed_int(
                            val.into_float_value(),
@@ -543,17 +559,18 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                            "fptosi",
                        )
                        .into(),
-               )
+               ))
            })))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "floor64".into(),
            simple_name: "floor64".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: float, default_value: None }],
                ret: int64,
                vars: Default::default(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -572,7 +589,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    .try_as_basic_value()
                    .left()
                    .unwrap();
-               Some(
+               Ok(Some(
                    ctx.builder
                        .build_float_to_signed_int(
                            val.into_float_value(),
@@ -580,17 +597,18 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                            "fptosi",
                        )
                        .into(),
-               )
+               ))
            })))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "ceil".into(),
            simple_name: "ceil".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: float, default_value: None }],
                ret: int32,
                vars: Default::default(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -609,7 +627,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    .try_as_basic_value()
                    .left()
                    .unwrap();
-               Some(
+               Ok(Some(
                    ctx.builder
                        .build_float_to_signed_int(
                            val.into_float_value(),
@@ -617,17 +635,18 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                            "fptosi",
                        )
                        .into(),
-               )
+               ))
            })))),
+           loc: None,
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "ceil64".into(),
            simple_name: "ceil64".into(),
-           signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+           signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
-               args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
+               args: vec![FuncArg { name: "n".into(), ty: float, default_value: None }],
                ret: int64,
                vars: Default::default(),
-           }))),
+           })),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
@@ -646,7 +665,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                    .try_as_basic_value()
                    .left()
                    .unwrap();
-               Some(
+               Ok(Some(
                    ctx.builder
                        .build_float_to_signed_int(
                            val.into_float_value(),
@@ -654,25 +673,26 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                            "fptosi",
                        )
                        .into(),
-               )
+               ))
            })))),
+           loc: None,
        })),
        Arc::new(RwLock::new({
-           let list_var = primitives.1.get_fresh_var();
+           let list_var = primitives.1.get_fresh_var(Some("L".into()), None);
            let list = primitives.1.add_ty(TypeEnum::TList { ty: list_var.0 });
-           let arg_ty = primitives.1.get_fresh_var_with_range(&[list, primitives.0.range]);
+           let arg_ty = primitives.1.get_fresh_var_with_range(&[list, primitives.0.range], Some("I".into()), None);
            TopLevelDef::Function {
                name: "len".into(),
                simple_name: "len".into(),
-               signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
+               signature: primitives.1.add_ty(TypeEnum::TFunc(FunSignature {
                   args: vec![FuncArg {
-                      name: "_".into(),
+                      name: "ls".into(),
                       ty: arg_ty.0,
                       default_value: None
                   }],
                   ret: int32,
                   vars: vec![(list_var.1, list_var.0), (arg_ty.1, arg_ty.0)].into_iter().collect(),
-               }))),
+               })),
               var_id: vec![arg_ty.1],
               instance_to_symbol: Default::default(),
               instance_to_stmt: Default::default(),
@@ -682,7 +702,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                   let range_ty = ctx.primitives.range;
                   let arg_ty = fun.0.args[0].ty;
                   let arg = args[0].1.clone().to_basic_value_enum(ctx, generator);
-                  if ctx.unifier.unioned(arg_ty, range_ty) {
+                  Ok(if ctx.unifier.unioned(arg_ty, range_ty) {
                      let arg = arg.into_pointer_value();
                      let (start, end, step) = destructure_range(ctx, arg);
                      Some(calculate_len_for_slice_range(ctx, start, end, step).into())
@@ -695,9 +715,10 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                      } else {
                          Some(len.into())
                      }
-                  }
+                  })
               },
               )))),
+              loc: None,
           }
       }))
    ];
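Note: every builtin codegen callback above now wraps its result in Ok(...) and so returns a Result whose error arm carries a message string, instead of returning a bare Option. A simplified, self-contained sketch of that calling convention follows; the CodegenResult alias, the i64 stand-in value type, and both function names are hypothetical illustrations, not the crate's inkwell-based types.

// Hypothetical stand-in for the callback shape used by the builtins above:
// a successful call may or may not produce a value; failures carry a message.
type CodegenResult = Result<Option<i64>, String>;

fn round_builtin(arg: f64) -> CodegenResult {
    // Mirrors the float-to-signed-int conversion performed by the real callback.
    Ok(Some(arg.round() as i64))
}

fn len_builtin(arg: Option<&[i64]>) -> CodegenResult {
    match arg {
        Some(slice) => Ok(Some(slice.len() as i64)),
        None => Err("len() called on an unsupported value".into()),
    }
}

fn main() {
    assert_eq!(round_builtin(2.6), Ok(Some(3)));
    assert_eq!(len_builtin(Some(&[1, 2, 3])), Ok(Some(3)));
    assert!(len_builtin(None).is_err());
}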
@@ -1,5 +1,3 @@
-use std::cell::RefCell;
-
 use nac3parser::ast::fold::Fold;
 
 use crate::{
@@ -102,7 +100,7 @@ impl TopLevelComposer {
         }
 
         for (name, sig, codegen_callback) in builtins {
-            let fun_sig = unifier.add_ty(TypeEnum::TFunc(RefCell::new(sig)));
+            let fun_sig = unifier.add_ty(TypeEnum::TFunc(sig));
             builtin_ty.insert(name, fun_sig);
             builtin_id.insert(name, DefinitionId(definition_ast_list.len()));
             definition_ast_list.push((
@@ -115,6 +113,7 @@ impl TopLevelComposer {
                     var_id: Default::default(),
                     resolver: None,
                     codegen_callback: Some(codegen_callback),
+                    loc: None,
                 })),
                 None,
             ));
@@ -192,13 +191,14 @@ impl TopLevelComposer {
 
                 // since later when registering class method, ast will still be used,
                 // here push None temporarily, later will move the ast inside
-                let constructor_ty = self.unifier.get_fresh_var().0;
+                let constructor_ty = self.unifier.get_dummy_var().0;
                 let mut class_def_ast = (
                     Arc::new(RwLock::new(Self::make_top_level_class_def(
                         class_def_id,
                         resolver.clone(),
                         fully_qualified_class_name,
                         Some(constructor_ty),
+                        Some(ast.location)
                     ))),
                     None,
                 );
@@ -256,19 +256,20 @@ impl TopLevelComposer {
                     };
 
                     // dummy method define here
-                    let dummy_method_type = self.unifier.get_fresh_var();
+                    let dummy_method_type = self.unifier.get_dummy_var().0;
                     class_method_name_def_ids.push((
                         *method_name,
                         RwLock::new(Self::make_top_level_function_def(
                             global_class_method_name,
                             *method_name,
                             // later unify with parsed type
-                            dummy_method_type.0,
+                            dummy_method_type,
                            resolver.clone(),
+                           Some(b.location),
                        ))
                        .into(),
                        DefinitionId(method_def_id),
-                       dummy_method_type.0,
+                       dummy_method_type,
                        b.clone(),
                    ));
                } else {
@@ -300,9 +301,6 @@ impl TopLevelComposer {
                }
 
            ast::StmtKind::FunctionDef { name, .. } => {
-               // if self.keyword_list.contains(name) {
-               //     return Err("cannot use keyword as a top level function name".into());
-               // }
                let global_fun_name = if mod_path.is_empty() {
                    name.to_string()
                } else {
@@ -317,7 +315,7 @@ impl TopLevelComposer {
                }
 
                let fun_name = *name;
-               let ty_to_be_unified = self.unifier.get_fresh_var().0;
+               let ty_to_be_unified = self.unifier.get_dummy_var().0;
                // add to the definition list
                self.definition_ast_list.push((
                    RwLock::new(Self::make_top_level_function_def(
@@ -326,6 +324,7 @@ impl TopLevelComposer {
                        // dummy here, unify with correct type later
                        ty_to_be_unified,
                        resolver,
+                       Some(ast.location)
                    ))
                    .into(),
                    Some(ast),
@@ -364,8 +363,7 @@ impl TopLevelComposer {
        let unifier = self.unifier.borrow_mut();
        let primitives_store = &self.primitives_ty;
 
-       // skip 5 to skip analyzing the primitives
+       let mut analyze = |class_def: &Arc<RwLock<TopLevelDef>>, class_ast: &Option<Stmt>| {
-       for (class_def, class_ast) in def_list.iter().skip(self.builtin_num) {
            // only deal with class def here
            let mut class_def = class_def.write();
            let (class_bases_ast, class_def_type_vars, class_resolver) = {
@@ -379,7 +377,7 @@ impl TopLevelComposer {
                        unreachable!("must be both class")
                    }
                } else {
-                   continue;
+                   return Ok(())
                }
            };
            let class_resolver = class_resolver.as_ref().unwrap();
@@ -459,6 +457,16 @@ impl TopLevelComposer {
                    _ => continue,
                }
            }
+           Ok(())
+       };
+       let mut errors = HashSet::new();
+       for (class_def, class_ast) in def_list.iter().skip(self.builtin_num) {
+           if let Err(e) = analyze(class_def, class_ast) {
+               errors.insert(e);
+           }
+       }
+       if !errors.is_empty() {
+           return Err(errors.iter().join("\n----------\n"));
        }
        Ok(())
    }
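Note: the hunk above introduces the pattern that recurs through the rest of this file: a per-definition pass becomes a closure returning Result<(), String>, failures from all definitions are collected into a set, and the pass fails once at the end with the messages joined by a separator line. A minimal, self-contained sketch of that aggregation idea follows; it is std-only (the real code uses a HashSet together with itertools' join), and the analyze/analyze_all names are illustrative, not the crate's.

use std::collections::BTreeSet;

fn analyze(name: &str) -> Result<(), String> {
    // Stand-in for a per-definition analysis pass.
    if name.starts_with("bad") {
        Err(format!("cannot analyze `{}`", name))
    } else {
        Ok(())
    }
}

fn analyze_all(names: &[&str]) -> Result<(), String> {
    // Keep going after the first failure so every distinct error is reported once.
    let mut errors = BTreeSet::new();
    for name in names {
        if let Err(e) = analyze(name) {
            errors.insert(e);
        }
    }
    if !errors.is_empty() {
        return Err(errors.into_iter().collect::<Vec<_>>().join("\n----------\n"));
    }
    Ok(())
}

fn main() {
    let report = analyze_all(&["ok1", "bad_a", "ok2", "bad_b"]).unwrap_err();
    println!("{}", report);
}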
@ -474,9 +482,9 @@ impl TopLevelComposer {
|
|||||||
|
|
||||||
let temp_def_list = self.extract_def_list();
|
let temp_def_list = self.extract_def_list();
|
||||||
let unifier = self.unifier.borrow_mut();
|
let unifier = self.unifier.borrow_mut();
|
||||||
|
let primitive_types = self.primitives_ty;
|
||||||
|
|
||||||
// first, only push direct parent into the list
|
let mut get_direct_parents = |class_def: &Arc<RwLock<TopLevelDef>>, class_ast: &Option<Stmt>| {
|
||||||
for (class_def, class_ast) in self.definition_ast_list.iter_mut().skip(self.builtin_num) {
|
|
||||||
let mut class_def = class_def.write();
|
let mut class_def = class_def.write();
|
||||||
let (class_def_id, class_bases, class_ancestors, class_resolver, class_type_vars) = {
|
let (class_def_id, class_bases, class_ancestors, class_resolver, class_type_vars) = {
|
||||||
if let TopLevelDef::Class { ancestors, resolver, object_id, type_vars, .. } =
|
if let TopLevelDef::Class { ancestors, resolver, object_id, type_vars, .. } =
|
||||||
@ -491,7 +499,7 @@ impl TopLevelComposer {
|
|||||||
unreachable!("must be both class")
|
unreachable!("must be both class")
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
continue;
|
return Ok(());
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let class_resolver = class_resolver.as_ref().unwrap();
|
let class_resolver = class_resolver.as_ref().unwrap();
|
||||||
@ -526,7 +534,7 @@ impl TopLevelComposer {
|
|||||||
class_resolver,
|
class_resolver,
|
||||||
&temp_def_list,
|
&temp_def_list,
|
||||||
unifier,
|
unifier,
|
||||||
&self.primitives_ty,
|
&primitive_types,
|
||||||
b,
|
b,
|
||||||
vec![(*class_def_id, class_type_vars.clone())].into_iter().collect(),
|
vec![(*class_def_id, class_type_vars.clone())].into_iter().collect(),
|
||||||
)?;
|
)?;
|
||||||
@ -540,17 +548,29 @@ impl TopLevelComposer {
|
|||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Ok(())
|
||||||
|
};
|
||||||
|
|
||||||
|
// first, only push direct parent into the list
|
||||||
|
let mut errors = HashSet::new();
|
||||||
|
for (class_def, class_ast) in self.definition_ast_list.iter_mut().skip(self.builtin_num) {
|
||||||
|
if let Err(e) = get_direct_parents(class_def, class_ast) {
|
||||||
|
errors.insert(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !errors.is_empty() {
|
||||||
|
return Err(errors.iter().join("\n----------\n"));
|
||||||
}
|
}
|
||||||
|
|
||||||
// second, get all ancestors
|
// second, get all ancestors
|
||||||
let mut ancestors_store: HashMap<DefinitionId, Vec<TypeAnnotation>> = Default::default();
|
let mut ancestors_store: HashMap<DefinitionId, Vec<TypeAnnotation>> = Default::default();
|
||||||
for (class_def, _) in self.definition_ast_list.iter().skip(self.builtin_num) {
|
let mut get_all_ancestors = |class_def: &Arc<RwLock<TopLevelDef>>| {
|
||||||
let class_def = class_def.read();
|
let class_def = class_def.read();
|
||||||
let (class_ancestors, class_id) = {
|
let (class_ancestors, class_id) = {
|
||||||
if let TopLevelDef::Class { ancestors, object_id, .. } = class_def.deref() {
|
if let TopLevelDef::Class { ancestors, object_id, .. } = class_def.deref() {
|
||||||
(ancestors, *object_id)
|
(ancestors, *object_id)
|
||||||
} else {
|
} else {
|
||||||
continue;
|
return Ok(())
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
ancestors_store.insert(
|
ancestors_store.insert(
|
||||||
@ -562,6 +582,15 @@ impl TopLevelComposer {
|
|||||||
Self::get_all_ancestors_helper(&class_ancestors[0], temp_def_list.as_slice())?
|
Self::get_all_ancestors_helper(&class_ancestors[0], temp_def_list.as_slice())?
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
Ok(())
|
||||||
|
};
|
||||||
|
for (class_def, _) in self.definition_ast_list.iter().skip(self.builtin_num) {
|
||||||
|
if let Err(e) = get_all_ancestors(class_def) {
|
||||||
|
errors.insert(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !errors.is_empty() {
|
||||||
|
return Err(errors.iter().join("\n----------\n"));
|
||||||
}
|
}
|
||||||
|
|
||||||
// insert the ancestors to the def list
|
// insert the ancestors to the def list
|
||||||
@ -619,9 +648,10 @@ impl TopLevelComposer {
|
|||||||
|
|
||||||
let mut type_var_to_concrete_def: HashMap<Type, TypeAnnotation> = HashMap::new();
|
let mut type_var_to_concrete_def: HashMap<Type, TypeAnnotation> = HashMap::new();
|
||||||
|
|
||||||
|
let mut errors = HashSet::new();
|
||||||
for (class_def, class_ast) in def_ast_list.iter().skip(self.builtin_num) {
|
for (class_def, class_ast) in def_ast_list.iter().skip(self.builtin_num) {
|
||||||
if matches!(&*class_def.read(), TopLevelDef::Class { .. }) {
|
if matches!(&*class_def.read(), TopLevelDef::Class { .. }) {
|
||||||
Self::analyze_single_class_methods_fields(
|
if let Err(e) = Self::analyze_single_class_methods_fields(
|
||||||
class_def.clone(),
|
class_def.clone(),
|
||||||
&class_ast.as_ref().unwrap().node,
|
&class_ast.as_ref().unwrap().node,
|
||||||
&temp_def_list,
|
&temp_def_list,
|
||||||
@ -629,11 +659,19 @@ impl TopLevelComposer {
|
|||||||
primitives,
|
primitives,
|
||||||
&mut type_var_to_concrete_def,
|
&mut type_var_to_concrete_def,
|
||||||
(&self.keyword_list, &self.core_config)
|
(&self.keyword_list, &self.core_config)
|
||||||
)?
|
) {
|
||||||
|
errors.insert(e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
if !errors.is_empty() {
|
||||||
|
return Err(errors.iter().join("\n----------\n"));
|
||||||
|
}
|
||||||
|
|
||||||
// handle the inheritanced methods and fields
|
// handle the inheritanced methods and fields
|
||||||
|
// Note: we cannot defer error handling til the end of the loop, because there is loop
|
||||||
|
// carried dependency, ignoring the error (temporarily) will cause all assumptions to break
|
||||||
|
// and produce weird error messages
|
||||||
let mut current_ancestor_depth: usize = 2;
|
let mut current_ancestor_depth: usize = 2;
|
||||||
loop {
|
loop {
|
||||||
let mut finished = true;
|
let mut finished = true;
|
||||||
@ -668,10 +706,19 @@ impl TopLevelComposer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// unification of previously assigned typevar
|
// unification of previously assigned typevar
|
||||||
for (ty, def) in type_var_to_concrete_def {
|
let mut unification_helper = |ty, def| {
|
||||||
let target_ty =
|
let target_ty =
|
||||||
get_type_from_type_annotation_kinds(&temp_def_list, unifier, primitives, &def)?;
|
get_type_from_type_annotation_kinds(&temp_def_list, unifier, primitives, &def)?;
|
||||||
unifier.unify(ty, target_ty)?;
|
unifier.unify(ty, target_ty).map_err(|e| e.to_display(unifier).to_string())?;
|
||||||
|
Ok(()) as Result<(), String>
|
||||||
|
};
|
||||||
|
for (ty, def) in type_var_to_concrete_def {
|
||||||
|
if let Err(e) = unification_helper(ty, def) {
|
||||||
|
errors.insert(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !errors.is_empty() {
|
||||||
|
return Err(errors.iter().join("\n----------\n"));
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
@ -685,15 +732,15 @@ impl TopLevelComposer {
|
|||||||
let unifier = self.unifier.borrow_mut();
|
let unifier = self.unifier.borrow_mut();
|
||||||
let primitives_store = &self.primitives_ty;
|
let primitives_store = &self.primitives_ty;
|
||||||
|
|
||||||
// skip 5 to skip analyzing the primitives
|
let mut errors = HashSet::new();
|
||||||
for (function_def, function_ast) in def_list.iter().skip(self.builtin_num) {
|
let mut analyze = |function_def: &Arc<RwLock<TopLevelDef>>, function_ast: &Option<Stmt>| {
|
||||||
let mut function_def = function_def.write();
|
let mut function_def = function_def.write();
|
||||||
let function_def = function_def.deref_mut();
|
let function_def = function_def.deref_mut();
|
||||||
let function_ast = if let Some(x) = function_ast.as_ref() {
|
let function_ast = if let Some(x) = function_ast.as_ref() {
|
||||||
x
|
x
|
||||||
} else {
|
} else {
|
||||||
// if let TopLevelDef::Function { name, .. } = ``
|
// if let TopLevelDef::Function { name, .. } = ``
|
||||||
continue;
|
return Ok(())
|
||||||
};
|
};
|
||||||
|
|
||||||
if let TopLevelDef::Function { signature: dummy_ty, resolver, var_id, .. } =
|
if let TopLevelDef::Function { signature: dummy_ty, resolver, var_id, .. } =
|
||||||
@ -701,7 +748,7 @@ impl TopLevelComposer {
|
|||||||
{
|
{
|
||||||
if matches!(unifier.get_ty(*dummy_ty).as_ref(), TypeEnum::TFunc(_)) {
|
if matches!(unifier.get_ty(*dummy_ty).as_ref(), TypeEnum::TFunc(_)) {
|
||||||
// already have a function type, is class method, skip
|
// already have a function type, is class method, skip
|
||||||
continue;
|
return Ok(());
|
||||||
}
|
}
|
||||||
if let ast::StmtKind::FunctionDef { args, returns, .. } = &function_ast.node {
|
if let ast::StmtKind::FunctionDef { args, returns, .. } = &function_ast.node {
|
||||||
let resolver = resolver.as_ref();
|
let resolver = resolver.as_ref();
|
||||||
@ -854,7 +901,7 @@ impl TopLevelComposer {
|
|||||||
var_id.extend_from_slice(function_var_map
|
var_id.extend_from_slice(function_var_map
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|(id, ty)| {
|
.filter_map(|(id, ty)| {
|
||||||
if matches!(&*unifier.get_ty(*ty), TypeEnum::TVar { range, .. } if range.borrow().is_empty()) {
|
if matches!(&*unifier.get_ty(*ty), TypeEnum::TVar { range, .. } if range.is_empty()) {
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
Some(*id)
|
Some(*id)
|
||||||
@ -865,18 +912,26 @@ impl TopLevelComposer {
|
|||||||
);
|
);
|
||||||
let function_ty = unifier.add_ty(TypeEnum::TFunc(
|
let function_ty = unifier.add_ty(TypeEnum::TFunc(
|
||||||
FunSignature { args: arg_types, ret: return_ty, vars: function_var_map }
|
FunSignature { args: arg_types, ret: return_ty, vars: function_var_map }
|
||||||
.into(),
|
|
||||||
));
|
));
|
||||||
unifier
|
unifier
|
||||||
.unify(*dummy_ty, function_ty)
|
.unify(*dummy_ty, function_ty)
|
||||||
.map_err(|old| format!("{} (at {})", old, function_ast.location))?;
|
.map_err(|e| e.at(Some(function_ast.location)).to_display(unifier).to_string())?;
|
||||||
} else {
|
} else {
|
||||||
unreachable!("must be both function");
|
unreachable!("must be both function");
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// not top level function def, skip
|
// not top level function def, skip
|
||||||
continue;
|
return Ok(())
|
||||||
}
|
}
|
||||||
|
Ok(())
|
||||||
|
};
|
||||||
|
for (function_def, function_ast) in def_list.iter().skip(self.builtin_num) {
|
||||||
|
if let Err(e) = analyze(function_def, function_ast) {
|
||||||
|
errors.insert(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !errors.is_empty() {
|
||||||
|
return Err(errors.iter().join("\n----------\n"))
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@ -1022,7 +1077,7 @@ impl TopLevelComposer {
|
|||||||
// finish handling type vars
|
// finish handling type vars
|
||||||
let dummy_func_arg = FuncArg {
|
let dummy_func_arg = FuncArg {
|
||||||
name,
|
name,
|
||||||
ty: unifier.get_fresh_var().0,
|
ty: unifier.get_dummy_var().0,
|
||||||
default_value: match default {
|
default_value: match default {
|
||||||
None => None,
|
None => None,
|
||||||
Some(default) => {
|
Some(default) => {
|
||||||
@ -1074,13 +1129,13 @@ impl TopLevelComposer {
|
|||||||
unreachable!("must be type var annotation");
|
unreachable!("must be type var annotation");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let dummy_return_type = unifier.get_fresh_var().0;
|
let dummy_return_type = unifier.get_dummy_var().0;
|
||||||
type_var_to_concrete_def.insert(dummy_return_type, annotation.clone());
|
type_var_to_concrete_def.insert(dummy_return_type, annotation.clone());
|
||||||
dummy_return_type
|
dummy_return_type
|
||||||
} else {
|
} else {
|
||||||
// if do not have return annotation, return none
|
// if do not have return annotation, return none
|
||||||
// for uniform handling, still use type annoatation
|
// for uniform handling, still use type annoatation
|
||||||
let dummy_return_type = unifier.get_fresh_var().0;
|
let dummy_return_type = unifier.get_dummy_var().0;
|
||||||
type_var_to_concrete_def.insert(
|
type_var_to_concrete_def.insert(
|
||||||
dummy_return_type,
|
dummy_return_type,
|
||||||
TypeAnnotation::Primitive(primitives.none),
|
TypeAnnotation::Primitive(primitives.none),
|
||||||
@ -1095,7 +1150,7 @@ impl TopLevelComposer {
|
|||||||
var_id.extend_from_slice(method_var_map
|
var_id.extend_from_slice(method_var_map
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|(id, ty)| {
|
.filter_map(|(id, ty)| {
|
||||||
if matches!(&*unifier.get_ty(*ty), TypeEnum::TVar { range, .. } if range.borrow().is_empty()) {
|
if matches!(&*unifier.get_ty(*ty), TypeEnum::TVar { range, .. } if range.is_empty()) {
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
Some(*id)
|
Some(*id)
|
||||||
@ -1114,12 +1169,12 @@ impl TopLevelComposer {
|
|||||||
|
|
||||||
// unify now since function type is not in type annotation define
|
// unify now since function type is not in type annotation define
|
||||||
// which should be fine since type within method_type will be subst later
|
// which should be fine since type within method_type will be subst later
|
||||||
unifier.unify(method_dummy_ty, method_type)?;
|
unifier.unify(method_dummy_ty, method_type).map_err(|e| e.to_display(unifier).to_string())?;
|
||||||
}
|
}
|
||||||
ast::StmtKind::AnnAssign { target, annotation, value: None, .. } => {
|
ast::StmtKind::AnnAssign { target, annotation, value: None, .. } => {
|
||||||
if let ast::ExprKind::Name { id: attr, .. } = &target.node {
|
if let ast::ExprKind::Name { id: attr, .. } = &target.node {
|
||||||
if defined_fields.insert(attr.to_string()) {
|
if defined_fields.insert(attr.to_string()) {
|
||||||
let dummy_field_type = unifier.get_fresh_var().0;
|
let dummy_field_type = unifier.get_dummy_var().0;
|
||||||
|
|
||||||
// handle Kernel[T], KernelInvariant[T]
|
// handle Kernel[T], KernelInvariant[T]
|
||||||
let (annotation, mutable) = match &annotation.node {
|
let (annotation, mutable) = match &annotation.node {
|
||||||
@ -1314,7 +1369,12 @@ impl TopLevelComposer {
|
|||||||
let init_str_id = "__init__".into();
|
let init_str_id = "__init__".into();
|
||||||
let mut definition_extension = Vec::new();
|
let mut definition_extension = Vec::new();
|
||||||
let mut constructors = Vec::new();
|
let mut constructors = Vec::new();
|
||||||
for (i, (def, ast)) in self.definition_ast_list.iter().enumerate().skip(self.builtin_num) {
|
let def_list = self.extract_def_list();
|
||||||
|
let primitives_ty = &self.primitives_ty;
|
||||||
|
let definition_ast_list = &self.definition_ast_list;
|
||||||
|
let unifier = &mut self.unifier;
|
||||||
|
let mut errors = HashSet::new();
|
||||||
|
let mut analyze = |i, def: &Arc<RwLock<TopLevelDef>>, ast: &Option<Stmt>| {
|
||||||
let class_def = def.read();
|
let class_def = def.read();
|
||||||
if let TopLevelDef::Class {
|
if let TopLevelDef::Class {
|
||||||
constructor,
|
constructor,
|
||||||
@ -1329,16 +1389,16 @@ impl TopLevelComposer {
|
|||||||
} = &*class_def
|
} = &*class_def
|
||||||
{
|
{
|
||||||
let self_type = get_type_from_type_annotation_kinds(
|
let self_type = get_type_from_type_annotation_kinds(
|
||||||
self.extract_def_list().as_slice(),
|
&def_list,
|
||||||
&mut self.unifier,
|
unifier,
|
||||||
&self.primitives_ty,
|
primitives_ty,
|
||||||
&make_self_type_annotation(type_vars, *object_id),
|
&make_self_type_annotation(type_vars, *object_id),
|
||||||
)?;
|
)?;
|
||||||
if ancestors.iter().any(|ann| matches!(ann, TypeAnnotation::CustomClass { id, .. } if id.0 == 7)) {
|
if ancestors.iter().any(|ann| matches!(ann, TypeAnnotation::CustomClass { id, .. } if id.0 == 7)) {
|
||||||
// create constructor for these classes
|
// create constructor for these classes
|
||||||
let string = self.primitives_ty.str;
|
let string = primitives_ty.str;
|
||||||
let int64 = self.primitives_ty.int64;
|
let int64 = primitives_ty.int64;
|
||||||
let signature = self.unifier.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
let signature = unifier.add_ty(TypeEnum::TFunc(FunSignature {
|
||||||
args: vec![
|
args: vec![
|
||||||
FuncArg { name: "msg".into(), ty: string,
|
FuncArg { name: "msg".into(), ty: string,
|
||||||
default_value: Some(SymbolValue::Str("".into()))},
|
default_value: Some(SymbolValue::Str("".into()))},
|
||||||
@ -1351,7 +1411,7 @@ impl TopLevelComposer {
|
|||||||
],
|
],
|
||||||
ret: self_type,
|
ret: self_type,
|
||||||
vars: Default::default()
|
vars: Default::default()
|
||||||
})));
|
}));
|
||||||
let cons_fun = TopLevelDef::Function {
|
let cons_fun = TopLevelDef::Function {
|
||||||
name: format!("{}.{}", class_name, "__init__"),
|
name: format!("{}.{}", class_name, "__init__"),
|
||||||
simple_name: init_str_id,
|
simple_name: init_str_id,
|
||||||
@ -1360,14 +1420,16 @@ impl TopLevelComposer {
|
|||||||
instance_to_symbol: Default::default(),
|
instance_to_symbol: Default::default(),
|
||||||
instance_to_stmt: Default::default(),
|
instance_to_stmt: Default::default(),
|
||||||
resolver: None,
|
resolver: None,
|
||||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(exn_constructor))))
|
codegen_callback: Some(Arc::new(GenCall::new(Box::new(exn_constructor)))),
|
||||||
|
loc: None
|
||||||
};
|
};
|
||||||
constructors.push((i, signature, definition_extension.len()));
|
constructors.push((i, signature, definition_extension.len()));
|
||||||
definition_extension.push((Arc::new(RwLock::new(cons_fun)), None));
-self.unifier
+unifier
.unify(constructor.unwrap(), signature)
-.map_err(|old| format!("{} (at {})", old, ast.as_ref().unwrap().location))?;
-continue;
+.map_err(|e| e.at(Some(ast.as_ref().unwrap().location))
+.to_display(unifier).to_string())?;
+return Ok(());
}
|
||||||
let mut init_id: Option<DefinitionId> = None;
|
let mut init_id: Option<DefinitionId> = None;
|
||||||
// get the class constructor type correct
|
||||||
@ -1377,8 +1439,7 @@ impl TopLevelComposer {
|
|||||||
for (name, func_sig, id) in methods {
|
for (name, func_sig, id) in methods {
|
||||||
if *name == init_str_id {
|
if *name == init_str_id {
|
||||||
init_id = Some(*id);
|
init_id = Some(*id);
|
||||||
if let TypeEnum::TFunc(sig) = self.unifier.get_ty(*func_sig).as_ref() {
|
if let TypeEnum::TFunc(FunSignature { args, vars, ..}) = unifier.get_ty(*func_sig).as_ref() {
|
||||||
let FunSignature { args, vars, .. } = &*sig.borrow();
|
|
||||||
constructor_args.extend_from_slice(args);
|
constructor_args.extend_from_slice(args);
|
||||||
type_vars.extend(vars);
|
type_vars.extend(vars);
|
||||||
} else {
|
} else {
|
||||||
@ -1388,18 +1449,17 @@ impl TopLevelComposer {
|
|||||||
}
|
}
|
||||||
(constructor_args, type_vars)
|
(constructor_args, type_vars)
|
||||||
};
|
};
|
||||||
let contor_type = self.unifier.add_ty(TypeEnum::TFunc(
|
let contor_type = unifier.add_ty(TypeEnum::TFunc(
|
||||||
FunSignature { args: contor_args, ret: self_type, vars: contor_type_vars }
|
FunSignature { args: contor_args, ret: self_type, vars: contor_type_vars }
|
||||||
.into(),
|
|
||||||
));
|
));
|
||||||
-self.unifier
+unifier
.unify(constructor.unwrap(), contor_type)
-.map_err(|old| format!("{} (at {})", old, ast.as_ref().unwrap().location))?;
+.map_err(|e| e.at(Some(ast.as_ref().unwrap().location)).to_display(&unifier).to_string())?;
|
||||||
|
|
||||||
// class field instantiation check
|
// class field instantiation check
|
||||||
if let (Some(init_id), false) = (init_id, fields.is_empty()) {
|
if let (Some(init_id), false) = (init_id, fields.is_empty()) {
|
||||||
let init_ast =
|
let init_ast =
|
||||||
self.definition_ast_list.get(init_id.0).unwrap().1.as_ref().unwrap();
|
definition_ast_list.get(init_id.0).unwrap().1.as_ref().unwrap();
|
||||||
if let ast::StmtKind::FunctionDef { name, body, .. } = &init_ast.node {
|
if let ast::StmtKind::FunctionDef { name, body, .. } = &init_ast.node {
|
||||||
if *name != init_str_id {
|
if *name != init_str_id {
|
||||||
unreachable!("must be init function here")
|
unreachable!("must be init function here")
|
||||||
@ -1418,7 +1478,17 @@ impl TopLevelComposer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
+Ok(())
+};
+for (i, (def, ast)) in definition_ast_list.iter().enumerate().skip(self.builtin_num) {
+if let Err(e) = analyze(i, def, ast) {
+errors.insert(e);
}
+}
+if !errors.is_empty() {
+return Err(errors.iter().join("\n---------\n"));
+}
|
||||||
|
|
||||||
for (i, signature, id) in constructors.into_iter() {
|
for (i, signature, id) in constructors.into_iter() {
|
||||||
if let TopLevelDef::Class { methods, .. } = &mut *self.definition_ast_list[i].0.write() {
|
if let TopLevelDef::Class { methods, .. } = &mut *self.definition_ast_list[i].0.write() {
|
||||||
methods.push((init_str_id, signature,
|
methods.push((init_str_id, signature,
|
||||||
@ -1431,10 +1501,14 @@ impl TopLevelComposer {
|
|||||||
|
|
||||||
let ctx = Arc::new(self.make_top_level_context());
|
let ctx = Arc::new(self.make_top_level_context());
|
||||||
// type inference inside function body
|
// type inference inside function body
|
||||||
for (id, (def, ast)) in self.definition_ast_list.iter().enumerate().skip(self.builtin_num)
|
let def_list = self.extract_def_list();
|
||||||
{
|
let primitives_ty = &self.primitives_ty;
|
||||||
|
let definition_ast_list = &self.definition_ast_list;
|
||||||
|
let unifier = &mut self.unifier;
|
||||||
|
let method_class = &mut self.method_class;
|
||||||
|
let mut analyze_2 = |id, def: &Arc<RwLock<TopLevelDef>>, ast: &Option<Stmt>| {
|
||||||
if ast.is_none() {
|
if ast.is_none() {
|
||||||
continue;
|
return Ok(())
|
||||||
}
|
}
|
||||||
let mut function_def = def.write();
|
let mut function_def = def.write();
|
||||||
if let TopLevelDef::Function {
|
if let TopLevelDef::Function {
|
||||||
@ -1448,19 +1522,18 @@ impl TopLevelComposer {
|
|||||||
..
|
..
|
||||||
} = &mut *function_def
|
} = &mut *function_def
|
||||||
{
|
{
|
||||||
if let TypeEnum::TFunc(func_sig) = self.unifier.get_ty(*signature).as_ref() {
|
if let TypeEnum::TFunc(FunSignature { args, ret, vars }) = unifier.get_ty(*signature).as_ref() {
|
||||||
let FunSignature { args, ret, vars } = &*func_sig.borrow();
|
|
||||||
// None if is not class method
|
// None if is not class method
|
||||||
let uninst_self_type = {
|
let uninst_self_type = {
|
||||||
if let Some(class_id) = self.method_class.get(&DefinitionId(id)) {
|
if let Some(class_id) = method_class.get(&DefinitionId(id)) {
|
||||||
let class_def = self.definition_ast_list.get(class_id.0).unwrap();
|
let class_def = definition_ast_list.get(class_id.0).unwrap();
|
||||||
let class_def = class_def.0.read();
|
let class_def = class_def.0.read();
|
||||||
if let TopLevelDef::Class { type_vars, .. } = &*class_def {
|
if let TopLevelDef::Class { type_vars, .. } = &*class_def {
|
||||||
let ty_ann = make_self_type_annotation(type_vars, *class_id);
|
let ty_ann = make_self_type_annotation(type_vars, *class_id);
|
||||||
let self_ty = get_type_from_type_annotation_kinds(
|
let self_ty = get_type_from_type_annotation_kinds(
|
||||||
self.extract_def_list().as_slice(),
|
&def_list,
|
||||||
&mut self.unifier,
|
unifier,
|
||||||
&self.primitives_ty,
|
primitives_ty,
|
||||||
&ty_ann,
|
&ty_ann,
|
||||||
)?;
|
)?;
|
||||||
Some((self_ty, type_vars.clone()))
|
Some((self_ty, type_vars.clone()))
|
||||||
@ -1474,16 +1547,19 @@ impl TopLevelComposer {
|
|||||||
// carefully handle those with bounds, without bounds and no typevars
|
// carefully handle those with bounds, without bounds and no typevars
|
||||||
// if class methods, `vars` also contains all class typevars here
|
// if class methods, `vars` also contains all class typevars here
|
||||||
let (type_var_subst_comb, no_range_vars) = {
|
let (type_var_subst_comb, no_range_vars) = {
|
||||||
let unifier = &mut self.unifier;
|
|
||||||
let mut no_ranges: Vec<Type> = Vec::new();
|
let mut no_ranges: Vec<Type> = Vec::new();
|
||||||
let var_ids = vars.keys().copied().collect_vec();
|
let var_ids = vars.keys().copied().collect_vec();
|
||||||
let var_combs = vars
|
let var_combs = vars
|
||||||
.iter()
|
.iter()
|
||||||
.map(|(_, ty)| {
|
.map(|(_, ty)| {
|
||||||
unifier.get_instantiations(*ty).unwrap_or_else(|| {
|
unifier.get_instantiations(*ty).unwrap_or_else(|| {
|
||||||
let rigid = unifier.get_fresh_rigid_var().0;
|
if let TypeEnum::TVar { name, loc, .. } = &*unifier.get_ty(*ty) {
|
||||||
|
let rigid = unifier.get_fresh_rigid_var(*name, *loc).0;
|
||||||
no_ranges.push(rigid);
|
no_ranges.push(rigid);
|
||||||
vec![rigid]
|
vec![rigid]
|
||||||
|
} else {
|
||||||
|
unreachable!()
|
||||||
|
}
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
.multi_cartesian_product()
|
.multi_cartesian_product()
|
||||||
@ -1501,9 +1577,8 @@ impl TopLevelComposer {
|
|||||||
|
|
||||||
for subst in type_var_subst_comb {
|
for subst in type_var_subst_comb {
|
||||||
// for each instance
|
// for each instance
|
||||||
let inst_ret = self.unifier.subst(*ret, &subst).unwrap_or(*ret);
|
let inst_ret = unifier.subst(*ret, &subst).unwrap_or(*ret);
|
||||||
let inst_args = {
|
let inst_args = {
|
||||||
let unifier = &mut self.unifier;
|
|
||||||
args.iter()
|
args.iter()
|
||||||
.map(|a| FuncArg {
|
.map(|a| FuncArg {
|
||||||
name: a.name,
|
name: a.name,
|
||||||
@ -1513,7 +1588,6 @@ impl TopLevelComposer {
|
|||||||
.collect_vec()
|
.collect_vec()
|
||||||
};
|
};
|
||||||
let self_type = {
|
let self_type = {
|
||||||
let unifier = &mut self.unifier;
|
|
||||||
uninst_self_type
|
uninst_self_type
|
||||||
.clone()
|
.clone()
|
||||||
.map(|(self_type, type_vars)| {
|
.map(|(self_type, type_vars)| {
|
||||||
@ -1558,9 +1632,8 @@ impl TopLevelComposer {
|
|||||||
defined_identifiers: identifiers.clone(),
|
defined_identifiers: identifiers.clone(),
|
||||||
function_data: &mut FunctionData {
|
function_data: &mut FunctionData {
|
||||||
resolver: resolver.as_ref().unwrap().clone(),
|
resolver: resolver.as_ref().unwrap().clone(),
|
||||||
return_type: if self
|
return_type: if unifier
|
||||||
.unifier
|
.unioned(inst_ret, primitives_ty.none)
|
||||||
.unioned(inst_ret, self.primitives_ty.none)
|
|
||||||
{
|
{
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
@ -1569,18 +1642,18 @@ impl TopLevelComposer {
|
|||||||
// NOTE: allowed type vars
|
// NOTE: allowed type vars
|
||||||
bound_variables: no_range_vars.clone(),
|
bound_variables: no_range_vars.clone(),
|
||||||
},
|
},
|
||||||
unifier: &mut self.unifier,
|
unifier,
|
||||||
variable_mapping: {
|
variable_mapping: {
|
||||||
// NOTE: none and function args?
|
// NOTE: none and function args?
|
||||||
let mut result: HashMap<StrRef, Type> = HashMap::new();
|
let mut result: HashMap<StrRef, Type> = HashMap::new();
|
||||||
result.insert("None".into(), self.primitives_ty.none);
|
result.insert("None".into(), primitives_ty.none);
|
||||||
if let Some(self_ty) = self_type {
|
if let Some(self_ty) = self_type {
|
||||||
result.insert("self".into(), self_ty);
|
result.insert("self".into(), self_ty);
|
||||||
}
|
}
|
||||||
result.extend(inst_args.iter().map(|x| (x.name, x.ty)));
|
result.extend(inst_args.iter().map(|x| (x.name, x.ty)));
|
||||||
result
|
result
|
||||||
},
|
},
|
||||||
primitives: &self.primitives_ty,
|
primitives: primitives_ty,
|
||||||
virtual_checks: &mut Vec::new(),
|
virtual_checks: &mut Vec::new(),
|
||||||
calls: &mut calls,
|
calls: &mut calls,
|
||||||
in_handler: false
|
in_handler: false
|
||||||
@ -1631,8 +1704,8 @@ impl TopLevelComposer {
|
|||||||
if let TypeEnum::TObj { obj_id, .. } = &*ty {
|
if let TypeEnum::TObj { obj_id, .. } = &*ty {
|
||||||
*obj_id
|
*obj_id
|
||||||
} else {
|
} else {
|
||||||
let base_repr = inferencer.unifier.default_stringify(*base);
|
let base_repr = inferencer.unifier.stringify(*base);
|
||||||
let subtype_repr = inferencer.unifier.default_stringify(*subtype);
|
let subtype_repr = inferencer.unifier.stringify(*subtype);
|
||||||
return Err(format!("Expected a subtype of {}, but got {} (at {})", base_repr, subtype_repr, loc))
|
return Err(format!("Expected a subtype of {}, but got {} (at {})", base_repr, subtype_repr, loc))
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -1641,8 +1714,8 @@ impl TopLevelComposer {
|
|||||||
let m = ancestors.iter()
|
let m = ancestors.iter()
|
||||||
.find(|kind| matches!(kind, TypeAnnotation::CustomClass { id, .. } if *id == base_id));
|
.find(|kind| matches!(kind, TypeAnnotation::CustomClass { id, .. } if *id == base_id));
|
||||||
if m.is_none() {
|
if m.is_none() {
|
||||||
let base_repr = inferencer.unifier.default_stringify(*base);
|
let base_repr = inferencer.unifier.stringify(*base);
|
||||||
let subtype_repr = inferencer.unifier.default_stringify(*subtype);
|
let subtype_repr = inferencer.unifier.stringify(*subtype);
|
||||||
return Err(format!("Expected a subtype of {}, but got {} (at {})", base_repr, subtype_repr, loc))
|
return Err(format!("Expected a subtype of {}, but got {} (at {})", base_repr, subtype_repr, loc))
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
@ -1650,9 +1723,9 @@ impl TopLevelComposer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !self.unifier.unioned(inst_ret, self.primitives_ty.none) && !returned {
|
if !unifier.unioned(inst_ret, primitives_ty.none) && !returned {
|
||||||
let def_ast_list = &self.definition_ast_list;
|
let def_ast_list = &definition_ast_list;
|
||||||
let ret_str = self.unifier.stringify(
|
let ret_str = unifier.internal_stringify(
|
||||||
inst_ret,
|
inst_ret,
|
||||||
&mut |id| {
|
&mut |id| {
|
||||||
if let TopLevelDef::Class { name, .. } =
|
if let TopLevelDef::Class { name, .. } =
|
||||||
@ -1664,6 +1737,7 @@ impl TopLevelComposer {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
&mut |id| format!("tvar{}", id),
|
&mut |id| format!("tvar{}", id),
|
||||||
|
&mut None,
|
||||||
);
|
);
|
||||||
return Err(format!(
|
return Err(format!(
|
||||||
"expected return type of `{}` in function `{}` (at {})",
|
"expected return type of `{}` in function `{}` (at {})",
|
||||||
@ -1675,7 +1749,7 @@ impl TopLevelComposer {
|
|||||||
|
|
||||||
instance_to_stmt.insert(
|
instance_to_stmt.insert(
|
||||||
get_subst_key(
|
get_subst_key(
|
||||||
&mut self.unifier,
|
unifier,
|
||||||
self_type,
|
self_type,
|
||||||
&subst,
|
&subst,
|
||||||
Some(insted_vars),
|
Some(insted_vars),
|
||||||
@ -1691,9 +1765,16 @@ impl TopLevelComposer {
|
|||||||
} else {
|
} else {
|
||||||
unreachable!("must be typeenum::tfunc")
|
unreachable!("must be typeenum::tfunc")
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
continue;
|
|
||||||
}
|
}
|
||||||
|
Ok(())
|
||||||
|
};
|
||||||
|
for (id, (def, ast)) in self.definition_ast_list.iter().enumerate().skip(self.builtin_num) {
|
||||||
|
if let Err(e) = analyze_2(id, def, ast) {
|
||||||
|
errors.insert(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !errors.is_empty() {
|
||||||
|
return Err(errors.iter().join("\n----------\n"));
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -18,14 +18,14 @@ impl TopLevelDef {
|
|||||||
let fields_str = fields
|
let fields_str = fields
|
||||||
.iter()
|
.iter()
|
||||||
.map(|(n, ty, _)| {
|
.map(|(n, ty, _)| {
|
||||||
(n.to_string(), unifier.default_stringify(*ty))
|
(n.to_string(), unifier.stringify(*ty))
|
||||||
})
|
})
|
||||||
.collect_vec();
|
.collect_vec();
|
||||||
|
|
||||||
let methods_str = methods
|
let methods_str = methods
|
||||||
.iter()
|
.iter()
|
||||||
.map(|(n, ty, id)| {
|
.map(|(n, ty, id)| {
|
||||||
(n.to_string(), unifier.default_stringify(*ty), *id)
|
(n.to_string(), unifier.stringify(*ty), *id)
|
||||||
})
|
})
|
||||||
.collect_vec();
|
.collect_vec();
|
||||||
format!(
|
format!(
|
||||||
@ -34,13 +34,13 @@ impl TopLevelDef {
|
|||||||
ancestors.iter().map(|ancestor| ancestor.stringify(unifier)).collect_vec(),
|
ancestors.iter().map(|ancestor| ancestor.stringify(unifier)).collect_vec(),
|
||||||
fields_str.iter().map(|(a, _)| a).collect_vec(),
|
fields_str.iter().map(|(a, _)| a).collect_vec(),
|
||||||
methods_str.iter().map(|(a, b, _)| (a, b)).collect_vec(),
|
methods_str.iter().map(|(a, b, _)| (a, b)).collect_vec(),
|
||||||
type_vars.iter().map(|id| unifier.default_stringify(*id)).collect_vec(),
|
type_vars.iter().map(|id| unifier.stringify(*id)).collect_vec(),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
TopLevelDef::Function { name, signature, var_id, .. } => format!(
|
TopLevelDef::Function { name, signature, var_id, .. } => format!(
|
||||||
"Function {{\nname: {:?},\nsig: {:?},\nvar_id: {:?}\n}}",
|
"Function {{\nname: {:?},\nsig: {:?},\nvar_id: {:?}\n}}",
|
||||||
name,
|
name,
|
||||||
unifier.default_stringify(*signature),
|
unifier.stringify(*signature),
|
||||||
{
|
{
|
||||||
// preserve the order for debug output and test
|
// preserve the order for debug output and test
|
||||||
let mut r = var_id.clone();
|
let mut r = var_id.clone();
|
||||||
@ -117,6 +117,7 @@ impl TopLevelComposer {
|
|||||||
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
||||||
name: StrRef,
|
name: StrRef,
|
||||||
constructor: Option<Type>,
|
constructor: Option<Type>,
|
||||||
|
loc: Option<Location>
|
||||||
) -> TopLevelDef {
|
) -> TopLevelDef {
|
||||||
TopLevelDef::Class {
|
TopLevelDef::Class {
|
||||||
name,
|
name,
|
||||||
@ -127,6 +128,7 @@ impl TopLevelComposer {
|
|||||||
ancestors: Default::default(),
|
ancestors: Default::default(),
|
||||||
constructor,
|
constructor,
|
||||||
resolver,
|
resolver,
|
||||||
|
loc,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -136,6 +138,7 @@ impl TopLevelComposer {
|
|||||||
simple_name: StrRef,
|
simple_name: StrRef,
|
||||||
ty: Type,
|
ty: Type,
|
||||||
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
||||||
|
loc: Option<Location>
|
||||||
) -> TopLevelDef {
|
) -> TopLevelDef {
|
||||||
TopLevelDef::Function {
|
TopLevelDef::Function {
|
||||||
name,
|
name,
|
||||||
@ -146,6 +149,7 @@ impl TopLevelComposer {
|
|||||||
instance_to_stmt: Default::default(),
|
instance_to_stmt: Default::default(),
|
||||||
resolver,
|
resolver,
|
||||||
codegen_callback: None,
|
codegen_callback: None,
|
||||||
|
loc,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -244,12 +248,8 @@ impl TopLevelComposer {
|
|||||||
let this = this.as_ref();
|
let this = this.as_ref();
|
||||||
let other = unifier.get_ty(other);
|
let other = unifier.get_ty(other);
|
||||||
let other = other.as_ref();
|
let other = other.as_ref();
|
||||||
if let (TypeEnum::TFunc(this_sig), TypeEnum::TFunc(other_sig)) = (this, other) {
|
if let (TypeEnum::TFunc(FunSignature { args: this_args, ret: this_ret, ..}),
|
||||||
let (this_sig, other_sig) = (&*this_sig.borrow(), &*other_sig.borrow());
|
TypeEnum::TFunc(FunSignature { args: other_args, ret: other_ret, .. })) = (this, other) {
|
||||||
let (
|
|
||||||
FunSignature { args: this_args, ret: this_ret, vars: _this_vars },
|
|
||||||
FunSignature { args: other_args, ret: other_ret, vars: _other_vars },
|
|
||||||
) = (this_sig, other_sig);
|
|
||||||
// check args
|
// check args
|
||||||
let args_ok = this_args
|
let args_ok = this_args
|
||||||
.iter()
|
.iter()
|
||||||
|
@ -17,7 +17,7 @@ use crate::{
|
|||||||
};
|
};
|
||||||
use inkwell::values::BasicValueEnum;
|
use inkwell::values::BasicValueEnum;
|
||||||
use itertools::{izip, Itertools};
|
use itertools::{izip, Itertools};
|
||||||
use nac3parser::ast::{self, Stmt, StrRef};
|
use nac3parser::ast::{self, Location, Stmt, StrRef};
|
||||||
use parking_lot::RwLock;
|
use parking_lot::RwLock;
|
||||||
|
|
||||||
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash, Debug)]
|
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash, Debug)]
|
||||||
@ -39,7 +39,7 @@ type GenCallCallback = Box<
|
|||||||
(&FunSignature, DefinitionId),
|
(&FunSignature, DefinitionId),
|
||||||
Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
|
Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
|
||||||
&mut dyn CodeGenerator,
|
&mut dyn CodeGenerator,
|
||||||
) -> Option<BasicValueEnum<'ctx>>
|
) -> Result<Option<BasicValueEnum<'ctx>>, String>
|
||||||
+ Send
|
+ Send
|
||||||
+ Sync,
|
+ Sync,
|
||||||
>;
|
>;
|
||||||
@ -60,7 +60,7 @@ impl GenCall {
|
|||||||
fun: (&FunSignature, DefinitionId),
|
fun: (&FunSignature, DefinitionId),
|
||||||
args: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
|
args: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
|
||||||
generator: &mut dyn CodeGenerator,
|
generator: &mut dyn CodeGenerator,
|
||||||
) -> Option<BasicValueEnum<'ctx>> {
|
) -> Result<Option<BasicValueEnum<'ctx>>, String> {
|
||||||
(self.fp)(ctx, obj, fun, args, generator)
|
(self.fp)(ctx, obj, fun, args, generator)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
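With `GenCallCallback` now returning `Result<Option<BasicValueEnum<'ctx>>, String>`, a custom codegen hook reports failures as `Err(String)` rather than returning a bare `Option`. A minimal sketch of a hook with the new signature, registered the same way as `exn_constructor` above; the context and object parameter types sit outside this hunk, so their exact spelling here is an assumption:

// Illustrative only: a do-nothing hook matching the new fallible callback type.
fn noop_hook<'ctx, 'a>(
    _ctx: &mut CodeGenContext<'ctx, 'a>,         // assumed first parameter (not shown in this hunk)
    _obj: Option<(Type, ValueEnum<'ctx>)>,       // assumed second parameter (not shown in this hunk)
    _fun: (&FunSignature, DefinitionId),
    _args: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
    _generator: &mut dyn CodeGenerator,
) -> Result<Option<BasicValueEnum<'ctx>>, String> {
    // Emit nothing; succeed with no return value instead of panicking on errors.
    Ok(None)
}

let hook = GenCall::new(Box::new(noop_hook));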
@ -99,6 +99,8 @@ pub enum TopLevelDef {
|
|||||||
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
||||||
// constructor type
|
// constructor type
|
||||||
constructor: Option<Type>,
|
constructor: Option<Type>,
|
||||||
|
// definition location
|
||||||
|
loc: Option<Location>,
|
||||||
},
|
},
|
||||||
Function {
|
Function {
|
||||||
// prefix for symbol, should be unique globally
|
// prefix for symbol, should be unique globally
|
||||||
@ -124,6 +126,8 @@ pub enum TopLevelDef {
|
|||||||
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
||||||
// custom codegen callback
|
// custom codegen callback
|
||||||
codegen_callback: Option<Arc<GenCall>>,
|
codegen_callback: Option<Arc<GenCall>>,
|
||||||
|
// definition location
|
||||||
|
loc: Option<Location>,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,14 +1,14 @@
|
|||||||
---
|
---
|
||||||
source: nac3core/src/toplevel/test.rs
|
source: nac3core/src/toplevel/test.rs
|
||||||
assertion_line: 541
|
assertion_line: 540
|
||||||
expression: res_vec
|
expression: res_vec
|
||||||
|
|
||||||
---
|
---
|
||||||
[
|
[
|
||||||
"Class {\nname: \"Generic_A\",\nancestors: [\"{class: Generic_A, params: [\\\"var6\\\"]}\", \"{class: B, params: []}\"],\nfields: [\"aa\", \"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"foo\", \"fn[[b=var5], none]\"), (\"fun\", \"fn[[a=int32], var6]\")],\ntype_vars: [\"var6\"]\n}\n",
|
"Class {\nname: \"Generic_A\",\nancestors: [\"{class: Generic_A, params: [\\\"V\\\"]}\", \"{class: B, params: []}\"],\nfields: [\"aa\", \"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"foo\", \"fn[[b:T], none]\"), (\"fun\", \"fn[[a:int32], V]\")],\ntype_vars: [\"V\"]\n}\n",
|
||||||
"Function {\nname: \"Generic_A.__init__\",\nsig: \"fn[[], none]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"Generic_A.__init__\",\nsig: \"fn[[], none]\",\nvar_id: [6]\n}\n",
|
||||||
"Function {\nname: \"Generic_A.fun\",\nsig: \"fn[[a=int32], var6]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"Generic_A.fun\",\nsig: \"fn[[a:int32], V]\",\nvar_id: [6, 17]\n}\n",
|
||||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\"],\nfields: [\"aa\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"foo\", \"fn[[b=var5], none]\")],\ntype_vars: []\n}\n",
|
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\"],\nfields: [\"aa\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"foo\", \"fn[[b:T], none]\")],\ntype_vars: []\n}\n",
|
||||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"B.foo\",\nsig: \"fn[[b=var5], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"B.foo\",\nsig: \"fn[[b:T], none]\",\nvar_id: []\n}\n",
|
||||||
]
|
]
|
||||||
|
@ -1,17 +1,17 @@
|
|||||||
---
|
---
|
||||||
source: nac3core/src/toplevel/test.rs
|
source: nac3core/src/toplevel/test.rs
|
||||||
assertion_line: 541
|
assertion_line: 540
|
||||||
expression: res_vec
|
expression: res_vec
|
||||||
|
|
||||||
---
|
---
|
||||||
[
|
[
|
||||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var5\\\"]}\"],\nfields: [\"a\", \"b\", \"c\"],\nmethods: [(\"__init__\", \"fn[[t=var5], none]\"), (\"fun\", \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\"), (\"foo\", \"fn[[c=C], none]\")],\ntype_vars: [\"var5\"]\n}\n",
|
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"T\\\"]}\"],\nfields: [\"a\", \"b\", \"c\"],\nmethods: [(\"__init__\", \"fn[[t:T], none]\"), (\"fun\", \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\"), (\"foo\", \"fn[[c:C], none]\")],\ntype_vars: [\"T\"]\n}\n",
|
||||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[t=var5], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"A.__init__\",\nsig: \"fn[[t:T], none]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"A.fun\",\nsig: \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"A.foo\",\nsig: \"fn[[c=C], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"A.foo\",\nsig: \"fn[[c:C], none]\",\nvar_id: []\n}\n",
|
||||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: [\\\"var6\\\"]}\", \"{class: A, params: [\\\"float\\\"]}\"],\nfields: [\"a\", \"b\", \"c\", \"d\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\"), (\"foo\", \"fn[[c=C], none]\")],\ntype_vars: [\"var6\"]\n}\n",
|
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: [\\\"var6\\\"]}\", \"{class: A, params: [\\\"float\\\"]}\"],\nfields: [\"a\", \"b\", \"c\", \"d\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\"), (\"foo\", \"fn[[c:C], none]\")],\ntype_vars: [\"var6\"]\n}\n",
|
||||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: [6]\n}\n",
|
||||||
"Function {\nname: \"B.fun\",\nsig: \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"B.fun\",\nsig: \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\",\nvar_id: [6]\n}\n",
|
||||||
"Class {\nname: \"C\",\nancestors: [\"{class: C, params: []}\", \"{class: B, params: [\\\"bool\\\"]}\", \"{class: A, params: [\\\"float\\\"]}\"],\nfields: [\"a\", \"b\", \"c\", \"d\", \"e\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\"), (\"foo\", \"fn[[c=C], none]\")],\ntype_vars: []\n}\n",
|
"Class {\nname: \"C\",\nancestors: [\"{class: C, params: []}\", \"{class: B, params: [\\\"bool\\\"]}\", \"{class: A, params: [\\\"float\\\"]}\"],\nfields: [\"a\", \"b\", \"c\", \"d\", \"e\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\"), (\"foo\", \"fn[[c:C], none]\")],\ntype_vars: []\n}\n",
|
||||||
"Function {\nname: \"C.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"C.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||||
]
|
]
|
||||||
|
@ -1,15 +1,15 @@
|
|||||||
---
|
---
|
||||||
source: nac3core/src/toplevel/test.rs
|
source: nac3core/src/toplevel/test.rs
|
||||||
assertion_line: 541
|
assertion_line: 540
|
||||||
expression: res_vec
|
expression: res_vec
|
||||||
|
|
||||||
---
|
---
|
||||||
[
|
[
|
||||||
"Function {\nname: \"foo\",\nsig: \"fn[[a=list[int32], b=tuple[var5, float]], A[5->B, 6->bool]]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"foo\",\nsig: \"fn[[a:list[int32], b:tuple[T, float]], A[B, bool]]\",\nvar_id: []\n}\n",
|
||||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var5\\\", \\\"var6\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[v=var6], none]\"), (\"fun\", \"fn[[a=var5], var6]\")],\ntype_vars: [\"var5\", \"var6\"]\n}\n",
|
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"T\\\", \\\"V\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[v:V], none]\"), (\"fun\", \"fn[[a:T], V]\")],\ntype_vars: [\"T\", \"V\"]\n}\n",
|
||||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[v=var6], none]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"A.__init__\",\nsig: \"fn[[v:V], none]\",\nvar_id: [18, 19]\n}\n",
|
||||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a=var5], var6]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"A.fun\",\nsig: \"fn[[a:T], V]\",\nvar_id: [19, 24]\n}\n",
|
||||||
"Function {\nname: \"gfun\",\nsig: \"fn[[a=A[5->list[float], 6->int32]], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"gfun\",\nsig: \"fn[[a:A[int32, list[float]]], none]\",\nvar_id: []\n}\n",
|
||||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], none]\")],\ntype_vars: []\n}\n",
|
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], none]\")],\ntype_vars: []\n}\n",
|
||||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||||
]
|
]
|
||||||
|
@ -1,15 +1,15 @@
|
|||||||
---
|
---
|
||||||
source: nac3core/src/toplevel/test.rs
|
source: nac3core/src/toplevel/test.rs
|
||||||
assertion_line: 541
|
assertion_line: 540
|
||||||
expression: res_vec
|
expression: res_vec
|
||||||
|
|
||||||
---
|
---
|
||||||
[
|
[
|
||||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var5\\\", \\\"var6\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[a=A[5->float, 6->bool], b=B], none]\"), (\"fun\", \"fn[[a=A[5->float, 6->bool]], A[5->bool, 6->int32]]\")],\ntype_vars: [\"var5\", \"var6\"]\n}\n",
|
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var5\\\", \\\"var6\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[a:A[bool, float], b:B], none]\"), (\"fun\", \"fn[[a:A[bool, float]], A[bool, int32]]\")],\ntype_vars: [\"var5\", \"var6\"]\n}\n",
|
||||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[a=A[5->float, 6->bool], b=B], none]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"A.__init__\",\nsig: \"fn[[a:A[bool, float], b:B], none]\",\nvar_id: [6]\n}\n",
|
||||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a=A[5->float, 6->bool]], A[5->bool, 6->int32]]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"A.fun\",\nsig: \"fn[[a:A[bool, float]], A[bool, int32]]\",\nvar_id: [6]\n}\n",
|
||||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\", \"{class: A, params: [\\\"int64\\\", \\\"bool\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a=A[5->float, 6->bool]], A[5->bool, 6->int32]]\"), (\"foo\", \"fn[[b=B], B]\"), (\"bar\", \"fn[[a=A[5->list[B], 6->int32]], tuple[A[5->virtual[A[5->B, 6->int32]], 6->bool], B]]\")],\ntype_vars: []\n}\n",
|
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\", \"{class: A, params: [\\\"int64\\\", \\\"bool\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a:A[bool, float]], A[bool, int32]]\"), (\"foo\", \"fn[[b:B], B]\"), (\"bar\", \"fn[[a:A[int32, list[B]]], tuple[A[bool, virtual[A[B, int32]]], B]]\")],\ntype_vars: []\n}\n",
|
||||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"B.foo\",\nsig: \"fn[[b=B], B]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"B.foo\",\nsig: \"fn[[b:B], B]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"B.bar\",\nsig: \"fn[[a=A[5->list[B], 6->int32]], tuple[A[5->virtual[A[5->B, 6->int32]], 6->bool], B]]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"B.bar\",\nsig: \"fn[[a:A[int32, list[B]]], tuple[A[bool, virtual[A[B, int32]]], B]]\",\nvar_id: []\n}\n",
|
||||||
]
|
]
|
||||||
|
@ -1,19 +1,19 @@
|
|||||||
---
|
---
|
||||||
source: nac3core/src/toplevel/test.rs
|
source: nac3core/src/toplevel/test.rs
|
||||||
assertion_line: 541
|
assertion_line: 540
|
||||||
expression: res_vec
|
expression: res_vec
|
||||||
|
|
||||||
---
|
---
|
||||||
[
|
[
|
||||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b=B], none]\"), (\"foo\", \"fn[[a=var5, b=var6], none]\")],\ntype_vars: []\n}\n",
|
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b:B], none]\"), (\"foo\", \"fn[[a:T, b:V], none]\")],\ntype_vars: []\n}\n",
|
||||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"A.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[b=B], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"A.fun\",\nsig: \"fn[[b:B], none]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"A.foo\",\nsig: \"fn[[a=var5, b=var6], none]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"A.foo\",\nsig: \"fn[[a:T, b:V], none]\",\nvar_id: [25]\n}\n",
|
||||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\", \"{class: C, params: []}\", \"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b=B], none]\"), (\"foo\", \"fn[[a=var5, b=var6], none]\")],\ntype_vars: []\n}\n",
|
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\", \"{class: C, params: []}\", \"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b:B], none]\"), (\"foo\", \"fn[[a:T, b:V], none]\")],\ntype_vars: []\n}\n",
|
||||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||||
"Class {\nname: \"C\",\nancestors: [\"{class: C, params: []}\", \"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b=B], none]\"), (\"foo\", \"fn[[a=var5, b=var6], none]\")],\ntype_vars: []\n}\n",
|
"Class {\nname: \"C\",\nancestors: [\"{class: C, params: []}\", \"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b:B], none]\"), (\"foo\", \"fn[[a:T, b:V], none]\")],\ntype_vars: []\n}\n",
|
||||||
"Function {\nname: \"C.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"C.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"C.fun\",\nsig: \"fn[[b=B], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"C.fun\",\nsig: \"fn[[b:B], none]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"foo\",\nsig: \"fn[[a=A], none]\",\nvar_id: []\n}\n",
|
"Function {\nname: \"foo\",\nsig: \"fn[[a:A], none]\",\nvar_id: []\n}\n",
|
||||||
"Function {\nname: \"ff\",\nsig: \"fn[[a=var5], var6]\",\nvar_id: [6]\n}\n",
|
"Function {\nname: \"ff\",\nsig: \"fn[[a:T], V]\",\nvar_id: [33]\n}\n",
|
||||||
]
|
]
|
||||||
|
@ -61,8 +61,8 @@ impl SymbolResolver for Resolver {
|
|||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_identifier_def(&self, id: StrRef) -> Option<DefinitionId> {
|
fn get_identifier_def(&self, id: StrRef) -> Result<DefinitionId, String> {
|
||||||
self.0.id_to_def.lock().get(&id).cloned()
|
self.0.id_to_def.lock().get(&id).cloned().ok_or("Unknown identifier".to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_string_id(&self, _: &str) -> i32 {
|
fn get_string_id(&self, _: &str) -> i32 {
|
||||||
@ -129,9 +129,9 @@ fn test_simple_register(source: Vec<&str>) {
|
|||||||
"},
|
"},
|
||||||
],
|
],
|
||||||
vec![
|
vec![
|
||||||
"fn[[a=0], 0]",
|
"fn[[a:0], 0]",
|
||||||
"fn[[a=2], 4]",
|
"fn[[a:2], 4]",
|
||||||
"fn[[b=1], 0]",
|
"fn[[b:1], 0]",
|
||||||
],
|
],
|
||||||
vec![
|
vec![
|
||||||
"fun",
|
"fun",
|
||||||
@ -172,7 +172,7 @@ fn test_simple_function_analyze(source: Vec<&str>, tys: Vec<&str>, names: Vec<&s
|
|||||||
let ty_str =
|
let ty_str =
|
||||||
composer
|
composer
|
||||||
.unifier
|
.unifier
|
||||||
.stringify(*signature, &mut |id| id.to_string(), &mut |id| id.to_string());
|
.internal_stringify(*signature, &mut |id| id.to_string(), &mut |id| id.to_string(), &mut None);
|
||||||
assert_eq!(ty_str, tys[i]);
|
assert_eq!(ty_str, tys[i]);
|
||||||
assert_eq!(name, names[i]);
|
assert_eq!(name, names[i]);
|
||||||
}
|
}
|
||||||
@ -752,7 +752,7 @@ fn make_internal_resolver_with_tvar(
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(name, range)| {
|
.map(|(name, range)| {
|
||||||
(name, {
|
(name, {
|
||||||
let (ty, id) = unifier.get_fresh_var_with_range(range.as_slice());
|
let (ty, id) = unifier.get_fresh_var_with_range(range.as_slice(), None, None);
|
||||||
if print {
|
if print {
|
||||||
println!("{}: {:?}, tvar{}", name, ty, id);
|
println!("{}: {:?}, tvar{}", name, ty, id);
|
||||||
}
|
}
|
||||||
@ -779,9 +779,9 @@ impl<'a> Fold<Option<Type>> for TypeToStringFolder<'a> {
|
|||||||
type Error = String;
|
type Error = String;
|
||||||
fn map_user(&mut self, user: Option<Type>) -> Result<Self::TargetU, Self::Error> {
|
fn map_user(&mut self, user: Option<Type>) -> Result<Self::TargetU, Self::Error> {
|
||||||
Ok(if let Some(ty) = user {
|
Ok(if let Some(ty) = user {
|
||||||
self.unifier.stringify(ty, &mut |id| format!("class{}", id.to_string()), &mut |id| {
|
self.unifier.internal_stringify(ty, &mut |id| format!("class{}", id.to_string()), &mut |id| {
|
||||||
format!("tvar{}", id.to_string())
|
format!("tvar{}", id.to_string())
|
||||||
})
|
}, &mut None)
|
||||||
} else {
|
} else {
|
||||||
"None".into()
|
"None".into()
|
||||||
})
|
})
|
||||||
@ -1,6 +1,3 @@
-use std::cell::RefCell;
-
-use crate::typecheck::typedef::TypeVarMeta;
use super::*;

#[derive(Clone, Debug)]
|
||||||
@ -23,7 +20,7 @@ impl TypeAnnotation {
|
|||||||
pub fn stringify(&self, unifier: &mut Unifier) -> String {
|
pub fn stringify(&self, unifier: &mut Unifier) -> String {
|
||||||
use TypeAnnotation::*;
|
use TypeAnnotation::*;
|
||||||
match self {
|
match self {
|
||||||
Primitive(ty) | TypeVar(ty) => unifier.default_stringify(*ty),
|
Primitive(ty) | TypeVar(ty) => unifier.stringify(*ty),
|
||||||
CustomClass { id, params } => {
|
CustomClass { id, params } => {
|
||||||
let class_name = match unifier.top_level {
|
let class_name = match unifier.top_level {
|
||||||
Some(ref top) => if let TopLevelDef::Class { name, .. } = &*top.definitions.read()[id.0].read() {
|
Some(ref top) => if let TopLevelDef::Class { name, .. } = &*top.definitions.read()[id.0].read() {
|
||||||
@ -65,7 +62,7 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
|
|||||||
Ok(TypeAnnotation::Primitive(primitives.str))
|
Ok(TypeAnnotation::Primitive(primitives.str))
|
||||||
} else if id == &"Exception".into() {
|
} else if id == &"Exception".into() {
|
||||||
Ok(TypeAnnotation::CustomClass { id: DefinitionId(7), params: Default::default() })
|
Ok(TypeAnnotation::CustomClass { id: DefinitionId(7), params: Default::default() })
|
||||||
} else if let Some(obj_id) = resolver.get_identifier_def(*id) {
|
} else if let Ok(obj_id) = resolver.get_identifier_def(*id) {
|
||||||
let type_vars = {
|
let type_vars = {
|
||||||
let def_read = top_level_defs[obj_id.0].try_read();
|
let def_read = top_level_defs[obj_id.0].try_read();
|
||||||
if let Some(def_read) = def_read {
|
if let Some(def_read) = def_read {
|
||||||
@ -92,6 +89,8 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
|
|||||||
Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] })
|
Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] })
|
||||||
} else if let Ok(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id) {
|
} else if let Ok(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id) {
|
||||||
if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
|
if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
|
||||||
|
let var = unifier.get_fresh_var(Some(*id), Some(expr.location)).0;
|
||||||
|
unifier.unify(var, ty).unwrap();
|
||||||
Ok(TypeAnnotation::TypeVar(ty))
|
Ok(TypeAnnotation::TypeVar(ty))
|
||||||
} else {
|
} else {
|
||||||
Err(format!(
|
Err(format!(
|
||||||
@ -113,8 +112,7 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
|
|||||||
return Err(format!("keywords cannot be class name (at {})", expr.location));
|
return Err(format!("keywords cannot be class name (at {})", expr.location));
|
||||||
}
|
}
|
||||||
let obj_id = resolver
|
let obj_id = resolver
|
||||||
.get_identifier_def(*id)
|
.get_identifier_def(*id)?;
|
||||||
.ok_or_else(|| "unknown class name".to_string())?;
|
|
||||||
let type_vars = {
|
let type_vars = {
|
||||||
let def_read = top_level_defs[obj_id.0].try_read();
|
let def_read = top_level_defs[obj_id.0].try_read();
|
||||||
if let Some(def_read) = def_read {
|
if let Some(def_read) = def_read {
|
||||||
@ -293,14 +291,14 @@ pub fn get_type_from_type_annotation_kinds(
|
|||||||
// TODO: if allow type var to be applied(now this disallowed in the parse_to_type_annotation), need more check
|
// TODO: if allow type var to be applied(now this disallowed in the parse_to_type_annotation), need more check
|
||||||
let mut result: HashMap<u32, Type> = HashMap::new();
|
let mut result: HashMap<u32, Type> = HashMap::new();
|
||||||
for (tvar, p) in type_vars.iter().zip(param_ty) {
|
for (tvar, p) in type_vars.iter().zip(param_ty) {
|
||||||
if let TypeEnum::TVar { id, range, meta: TypeVarMeta::Generic } =
|
if let TypeEnum::TVar { id, range, fields: None, name, loc } =
|
||||||
unifier.get_ty(*tvar).as_ref()
|
unifier.get_ty(*tvar).as_ref()
|
||||||
{
|
{
|
||||||
let ok: bool = {
|
let ok: bool = {
|
||||||
// create a temp type var and unify to check compatibility
|
// create a temp type var and unify to check compatibility
|
||||||
p == *tvar || {
|
p == *tvar || {
|
||||||
let temp =
|
let temp =
|
||||||
unifier.get_fresh_var_with_range(range.borrow().as_slice());
|
unifier.get_fresh_var_with_range(range.as_slice(), *name, *loc);
|
||||||
unifier.unify(temp.0, p).is_ok()
|
unifier.unify(temp.0, p).is_ok()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -309,10 +307,11 @@ pub fn get_type_from_type_annotation_kinds(
|
|||||||
} else {
|
} else {
|
||||||
return Err(format!(
|
return Err(format!(
|
||||||
"cannot apply type {} to type variable with id {:?}",
|
"cannot apply type {} to type variable with id {:?}",
|
||||||
unifier.stringify(
|
unifier.internal_stringify(
|
||||||
p,
|
p,
|
||||||
&mut |id| format!("class{}", id),
|
&mut |id| format!("class{}", id),
|
||||||
&mut |id| format!("tvar{}", id)
|
&mut |id| format!("tvar{}", id),
|
||||||
|
&mut None
|
||||||
),
|
),
|
||||||
*id
|
*id
|
||||||
));
|
));
|
||||||
@ -338,7 +337,7 @@ pub fn get_type_from_type_annotation_kinds(
|
|||||||
|
|
||||||
Ok(unifier.add_ty(TypeEnum::TObj {
|
Ok(unifier.add_ty(TypeEnum::TObj {
|
||||||
obj_id: *obj_id,
|
obj_id: *obj_id,
|
||||||
fields: RefCell::new(tobj_fields),
|
fields: tobj_fields,
|
||||||
params: subst.into(),
|
params: subst.into(),
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
@ -438,8 +437,8 @@ pub fn check_overload_type_annotation_compatible(
|
|||||||
let b = unifier.get_ty(*b);
|
let b = unifier.get_ty(*b);
|
||||||
let b = b.deref();
|
let b = b.deref();
|
||||||
if let (
|
if let (
|
||||||
TypeEnum::TVar { id: a, meta: TypeVarMeta::Generic, .. },
|
TypeEnum::TVar { id: a, fields: None, .. },
|
||||||
TypeEnum::TVar { id: b, meta: TypeVarMeta::Generic, .. },
|
TypeEnum::TVar { id: b, fields: None, .. },
|
||||||
) = (a, b)
|
) = (a, b)
|
||||||
{
|
{
|
||||||
a == b
|
a == b
|
||||||
|
@ -2,10 +2,10 @@ use crate::typecheck::{
|
|||||||
type_inferencer::*,
|
type_inferencer::*,
|
||||||
typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier},
|
typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier},
|
||||||
};
|
};
|
||||||
use nac3parser::ast;
|
use nac3parser::ast::{self, StrRef};
|
||||||
use nac3parser::ast::{Cmpop, Operator, Unaryop};
|
use nac3parser::ast::{Cmpop, Operator, Unaryop};
|
||||||
use std::borrow::Borrow;
|
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
use std::rc::Rc;
|
||||||
|
|
||||||
pub fn binop_name(op: &Operator) -> &'static str {
|
pub fn binop_name(op: &Operator) -> &'static str {
|
||||||
match op {
|
match op {
|
||||||
@ -64,6 +64,25 @@ pub fn comparison_name(op: &Cmpop) -> Option<&'static str> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
+pub(super) fn with_fields<F>(unifier: &mut Unifier, ty: Type, f: F)
+    where F: FnOnce(&mut Unifier, &mut HashMap<StrRef, (Type, bool)>)
+{
+    let (id, mut fields, params) = if let TypeEnum::TObj { obj_id, fields, params } = &*unifier.get_ty(ty) {
+        (*obj_id, fields.clone(), params.clone())
+    } else {
+        unreachable!()
+    };
+    f(unifier, &mut fields);
+    unsafe {
+        let unification_table = unifier.get_unification_table();
+        unification_table.set_value(ty, Rc::new(TypeEnum::TObj {
+            obj_id: id,
+            fields,
+            params,
+        }));
+    }
+}
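Because `TObj` field maps are no longer wrapped in `RefCell`, the operator helpers below edit them through `with_fields`: it clones the field map, hands it to the closure, and writes the rebuilt `TObj` back into the unification table. A minimal sketch of the pattern, assuming a `unifier: &mut Unifier` and a primitive `Type` bound to `int32` are in scope; the `__example__` method name is made up for illustration:

// Register a hypothetical zero-argument `__example__` method returning int32 on a TObj type.
with_fields(unifier, int32, |unifier, fields| {
    let sig = unifier.add_ty(TypeEnum::TFunc(FunSignature {
        args: vec![],
        ret: int32,
        vars: HashMap::new(),
    }));
    // Field entries are (type, mutable); methods are stored as immutable fields.
    fields.insert("__example__".into(), (sig, false));
});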
pub fn impl_binop(
|
pub fn impl_binop(
|
||||||
unifier: &mut Unifier,
|
unifier: &mut Unifier,
|
||||||
store: &PrimitiveStore,
|
store: &PrimitiveStore,
|
||||||
@ -72,11 +91,11 @@ pub fn impl_binop(
|
|||||||
ret_ty: Type,
|
ret_ty: Type,
|
||||||
ops: &[ast::Operator],
|
ops: &[ast::Operator],
|
||||||
) {
|
) {
|
||||||
if let TypeEnum::TObj { fields, .. } = unifier.get_ty(ty).borrow() {
|
with_fields(unifier, ty, |unifier, fields| {
|
||||||
let (other_ty, other_var_id) = if other_ty.len() == 1 {
|
let (other_ty, other_var_id) = if other_ty.len() == 1 {
|
||||||
(other_ty[0], None)
|
(other_ty[0], None)
|
||||||
} else {
|
} else {
|
||||||
let (ty, var_id) = unifier.get_fresh_var_with_range(other_ty);
|
let (ty, var_id) = unifier.get_fresh_var_with_range(other_ty, Some("N".into()), None);
|
||||||
(ty, Some(var_id))
|
(ty, Some(var_id))
|
||||||
};
|
};
|
||||||
let function_vars = if let Some(var_id) = other_var_id {
|
let function_vars = if let Some(var_id) = other_var_id {
|
||||||
@ -85,7 +104,7 @@ pub fn impl_binop(
|
|||||||
HashMap::new()
|
HashMap::new()
|
||||||
};
|
};
|
||||||
for op in ops {
|
for op in ops {
|
||||||
fields.borrow_mut().insert(binop_name(op).into(), {
|
fields.insert(binop_name(op).into(), {
|
||||||
(
|
(
|
||||||
unifier.add_ty(TypeEnum::TFunc(
|
unifier.add_ty(TypeEnum::TFunc(
|
||||||
FunSignature {
|
FunSignature {
|
||||||
@ -97,13 +116,12 @@ pub fn impl_binop(
|
|||||||
name: "other".into(),
|
name: "other".into(),
|
||||||
}],
|
}],
|
||||||
}
|
}
|
||||||
.into(),
|
|
||||||
)),
|
)),
|
||||||
false,
|
false,
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
|
|
||||||
fields.borrow_mut().insert(binop_assign_name(op).into(), {
|
fields.insert(binop_assign_name(op).into(), {
|
||||||
(
|
(
|
||||||
unifier.add_ty(TypeEnum::TFunc(
|
unifier.add_ty(TypeEnum::TFunc(
|
||||||
FunSignature {
|
FunSignature {
|
||||||
@ -115,39 +133,33 @@ pub fn impl_binop(
|
|||||||
name: "other".into(),
|
name: "other".into(),
|
||||||
}],
|
}],
|
||||||
}
|
}
|
||||||
.into(),
|
|
||||||
)),
|
)),
|
||||||
false,
|
false,
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} else {
|
});
|
||||||
unreachable!("")
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn impl_unaryop(
|
pub fn impl_unaryop(
|
||||||
unifier: &mut Unifier,
|
unifier: &mut Unifier,
|
||||||
_store: &PrimitiveStore,
|
|
||||||
ty: Type,
|
ty: Type,
|
||||||
ret_ty: Type,
|
ret_ty: Type,
|
||||||
ops: &[ast::Unaryop],
|
ops: &[ast::Unaryop],
|
||||||
) {
|
) {
|
||||||
if let TypeEnum::TObj { fields, .. } = unifier.get_ty(ty).borrow() {
|
with_fields(unifier, ty, |unifier, fields| {
|
||||||
for op in ops {
|
for op in ops {
|
||||||
fields.borrow_mut().insert(
|
fields.insert(
|
||||||
unaryop_name(op).into(),
|
unaryop_name(op).into(),
|
||||||
(
|
(
|
||||||
unifier.add_ty(TypeEnum::TFunc(
|
unifier.add_ty(TypeEnum::TFunc(
|
||||||
FunSignature { ret: ret_ty, vars: HashMap::new(), args: vec![] }.into(),
|
FunSignature { ret: ret_ty, vars: HashMap::new(), args: vec![] }
|
||||||
)),
|
)),
|
||||||
false,
|
false,
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} else {
|
});
|
||||||
unreachable!()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn impl_cmpop(
|
pub fn impl_cmpop(
|
||||||
@ -157,9 +169,9 @@ pub fn impl_cmpop(
|
|||||||
other_ty: Type,
|
other_ty: Type,
|
||||||
ops: &[ast::Cmpop],
|
ops: &[ast::Cmpop],
|
||||||
) {
|
) {
|
||||||
if let TypeEnum::TObj { fields, .. } = unifier.get_ty(ty).borrow() {
|
with_fields(unifier, ty, |unifier, fields| {
|
||||||
for op in ops {
|
for op in ops {
|
||||||
fields.borrow_mut().insert(
|
fields.insert(
|
||||||
comparison_name(op).unwrap().into(),
|
comparison_name(op).unwrap().into(),
|
||||||
(
|
(
|
||||||
unifier.add_ty(TypeEnum::TFunc(
|
unifier.add_ty(TypeEnum::TFunc(
|
||||||
@ -172,15 +184,12 @@ pub fn impl_cmpop(
|
|||||||
name: "other".into(),
|
name: "other".into(),
|
||||||
}],
|
}],
|
||||||
}
|
}
|
||||||
.into(),
|
|
||||||
)),
|
)),
|
||||||
false,
|
false,
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} else {
|
});
|
||||||
unreachable!()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Add, Sub, Mult
|
/// Add, Sub, Mult
|
||||||
@ -257,18 +266,18 @@ pub fn impl_mod(
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// UAdd, USub
|
/// UAdd, USub
|
||||||
pub fn impl_sign(unifier: &mut Unifier, store: &PrimitiveStore, ty: Type) {
|
pub fn impl_sign(unifier: &mut Unifier, _store: &PrimitiveStore, ty: Type) {
|
||||||
impl_unaryop(unifier, store, ty, ty, &[ast::Unaryop::UAdd, ast::Unaryop::USub])
|
impl_unaryop(unifier, ty, ty, &[ast::Unaryop::UAdd, ast::Unaryop::USub])
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Invert
|
/// Invert
|
||||||
pub fn impl_invert(unifier: &mut Unifier, store: &PrimitiveStore, ty: Type) {
|
pub fn impl_invert(unifier: &mut Unifier, _store: &PrimitiveStore, ty: Type) {
|
||||||
impl_unaryop(unifier, store, ty, ty, &[ast::Unaryop::Invert])
|
impl_unaryop(unifier, ty, ty, &[ast::Unaryop::Invert])
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Not
|
/// Not
|
||||||
pub fn impl_not(unifier: &mut Unifier, store: &PrimitiveStore, ty: Type) {
|
pub fn impl_not(unifier: &mut Unifier, store: &PrimitiveStore, ty: Type) {
|
||||||
impl_unaryop(unifier, store, ty, store.bool, &[ast::Unaryop::Not])
|
impl_unaryop(unifier, ty, store.bool, &[ast::Unaryop::Not])
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Lt, LtE, Gt, GtE
|
/// Lt, LtE, Gt, GtE
|
||||||
@ -2,4 +2,5 @@ mod function_check;
pub mod magic_methods;
pub mod type_inferencer;
pub mod typedef;
+pub mod type_error;
mod unification_table;
nac3core/src/typecheck/type_error.rs (new file, 177 lines)
@ -0,0 +1,177 @@
+use std::fmt::Display;
+use std::collections::HashMap;
+
+use crate::typecheck::typedef::TypeEnum;
+
+use super::typedef::{Type, Unifier, RecordKey};
+use nac3parser::ast::{Location, StrRef};
+
+#[derive(Debug, Clone)]
+pub enum TypeErrorKind {
+    TooManyArguments {
+        expected: usize,
+        got: usize,
+    },
+    MissingArgs(String),
+    UnknownArgName(StrRef),
+    IncorrectArgType {
+        name: StrRef,
+        expected: Type,
+        got: Type,
+    },
+    FieldUnificationError {
+        field: RecordKey,
+        types: (Type, Type),
+        loc: (Option<Location>, Option<Location>),
+    },
+    IncompatibleRange(Type, Vec<Type>),
+    IncompatibleTypes(Type, Type),
+    MutationError(RecordKey, Type),
+    NoSuchField(RecordKey, Type),
+    TupleIndexOutOfBounds {
+        index: i32,
+        len: i32,
+    },
+    RequiresTypeAnn,
+    PolymorphicFunctionPointer,
+}
+
+#[derive(Debug, Clone)]
+pub struct TypeError {
+    pub kind: TypeErrorKind,
+    pub loc: Option<Location>,
+}
+
+impl TypeError {
+    pub fn new(kind: TypeErrorKind, loc: Option<Location>) -> TypeError {
+        TypeError { kind, loc }
+    }
+
+    pub fn at(mut self, loc: Option<Location>) -> TypeError {
+        self.loc = self.loc.or(loc);
+        self
+    }
+
+    pub fn to_display(self, unifier: &Unifier) -> DisplayTypeError {
+        DisplayTypeError {
+            err: self,
+            unifier
+        }
+    }
+}
|
|
||||||
|
pub struct DisplayTypeError<'a> {
|
||||||
|
pub err: TypeError,
|
||||||
|
pub unifier: &'a Unifier
|
||||||
|
}
|
||||||
|
|
||||||
|
fn loc_to_str(loc: Option<Location>) -> String {
|
||||||
|
match loc {
|
||||||
|
Some(loc) => format!("(in {})", loc),
|
||||||
|
None => "".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Display for DisplayTypeError<'a> {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
|
use TypeErrorKind::*;
|
||||||
|
let mut notes = Some(HashMap::new());
|
||||||
|
match &self.err.kind {
|
||||||
|
TooManyArguments { expected, got } => {
|
||||||
|
write!(f, "Too many arguments. Expected {} but got {}", expected, got)
|
||||||
|
}
|
||||||
|
MissingArgs(args) => {
|
||||||
|
write!(f, "Missing arguments: {}", args)
|
||||||
|
}
|
||||||
|
UnknownArgName(name) => {
|
||||||
|
write!(f, "Unknown argument name: {}", name)
|
||||||
|
}
|
||||||
|
IncorrectArgType {
|
||||||
|
name,
|
||||||
|
expected,
|
||||||
|
got,
|
||||||
|
} => {
|
||||||
|
let expected = self.unifier.stringify_with_notes(*expected, &mut notes);
|
||||||
|
let got = self.unifier.stringify_with_notes(*got, &mut notes);
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"Incorrect argument type for {}. Expected {}, but got {}",
|
||||||
|
name, expected, got
|
||||||
|
)
|
||||||
|
},
|
||||||
|
FieldUnificationError { field, types, loc } => {
|
||||||
|
let lhs = self.unifier.stringify_with_notes(types.0, &mut notes);
|
||||||
|
let rhs = self.unifier.stringify_with_notes(types.1, &mut notes);
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"Unable to unify field {}: Got types {}{} and {}{}",
|
||||||
|
field, lhs, loc_to_str(loc.0), rhs, loc_to_str(loc.1)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
IncompatibleRange(t, ts) => {
|
||||||
|
let t = self.unifier.stringify_with_notes(*t, &mut notes);
|
||||||
|
let ts = ts.iter().map(|t| self.unifier.stringify_with_notes(*t, &mut notes)).collect::<Vec<_>>();
|
||||||
|
write!(f, "Expected any one of these types: {}, but got {}", ts.join(", "), t)
|
||||||
|
}
|
||||||
|
IncompatibleTypes(t1, t2) => {
|
||||||
|
let type1 = self.unifier.get_ty_immutable(*t1);
|
||||||
|
let type2 = self.unifier.get_ty_immutable(*t2);
|
||||||
|
match (&*type1, &*type2) {
|
||||||
|
(TypeEnum::TCall(calls), _) => {
|
||||||
|
let loc = self.unifier.calls[calls[0].0].loc;
|
||||||
|
let result = write!(f, "{} is not callable", self.unifier.stringify_with_notes(*t2, &mut notes));
|
||||||
|
if let Some(loc) = loc {
|
||||||
|
result?;
|
||||||
|
write!(f, " (in {})", loc)?;
|
||||||
|
return Ok(())
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
(TypeEnum::TTuple { ty: ty1 }, TypeEnum::TTuple { ty: ty2 }) if ty1.len() != ty2.len() => {
|
||||||
|
let t1 = self.unifier.stringify_with_notes(*t1, &mut notes);
|
||||||
|
let t2 = self.unifier.stringify_with_notes(*t2, &mut notes);
|
||||||
|
write!(f, "Tuple length mismatch: got {} and {}", t1, t2)
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
let t1 = self.unifier.stringify_with_notes(*t1, &mut notes);
|
||||||
|
let t2 = self.unifier.stringify_with_notes(*t2, &mut notes);
|
||||||
|
write!(f, "Incompatible types: {} and {}", t1, t2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
MutationError(name, t) => {
|
||||||
|
if let TypeEnum::TTuple { .. } = &*self.unifier.get_ty_immutable(*t) {
|
||||||
|
write!(f, "Cannot assign to an element of a tuple")
|
||||||
|
} else {
|
||||||
|
let t = self.unifier.stringify_with_notes(*t, &mut notes);
|
||||||
|
write!(f, "Cannot assign to field {} of {}, which is immutable", name, t)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
NoSuchField(name, t) => {
|
||||||
|
let t = self.unifier.stringify_with_notes(*t, &mut notes);
|
||||||
|
write!(f, "`{}::{}` field does not exist", t, name)
|
||||||
|
}
|
||||||
|
TupleIndexOutOfBounds { index, len } => {
|
||||||
|
write!(f, "Tuple index out of bounds. Got {} but tuple has only {} elements", index, len)
|
||||||
|
}
|
||||||
|
RequiresTypeAnn => {
|
||||||
|
write!(f, "Unable to infer virtual object type: Type annotation required")
|
||||||
|
}
|
||||||
|
PolymorphicFunctionPointer => {
|
||||||
|
write!(f, "Polymorphic function pointers is not supported")
|
||||||
|
}
|
||||||
|
}?;
|
||||||
|
if let Some(loc) = self.err.loc {
|
||||||
|
write!(f, " at {}", loc)?;
|
||||||
|
}
|
||||||
|
let notes = notes.unwrap();
|
||||||
|
if !notes.is_empty() {
|
||||||
|
write!(f, "\n\nNotes:")?;
|
||||||
|
for line in notes.values() {
|
||||||
|
write!(f, "\n {}", line)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -3,7 +3,7 @@ use std::convert::{From, TryInto};
use std::iter::once;
use std::{cell::RefCell, sync::Arc};

use super::typedef::{Call, FunSignature, FuncArg, Type, TypeEnum, Unifier};
use super::typedef::{Call, FunSignature, FuncArg, Type, TypeEnum, Unifier, RecordField};
use super::{magic_methods::*, typedef::CallId};
use crate::{symbol_resolver::SymbolResolver, toplevel::TopLevelContext};
use itertools::izip;
@ -147,7 +147,9 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
            self.defined_identifiers.insert(name);
        }
        if let Some(old_typ) = self.variable_mapping.insert(name, typ) {
            self.unifier.unify(old_typ, typ)?;
            let loc = handler.location;
            self.unifier.unify(old_typ, typ).map_err(|e| e.at(Some(loc))
                .to_display(self.unifier).to_string())?;
        }
    }
    let mut type_ = naive_folder.fold_expr(*type_)?;
@ -249,7 +251,8 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
                }
            })
            .collect();
        let targets = targets?;
        let loc = node.location;
        let targets = targets.map_err(|e| e.at(Some(loc)).to_display(self.unifier).to_string())?;
        return Ok(Located {
            location: node.location,
            node: ast::StmtKind::Assign {
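The location-tagging idiom above repeats for every fallible fold; purely as an illustration (this helper is hypothetical and not part of the commit), it could be written once as:

// Hypothetical helper, not in this commit: attach a location to a unifier error
// and flatten it to the Result<_, String> shape the fold methods return.
fn located_err<T>(unifier: &mut Unifier, loc: Location, r: Result<T, TypeError>) -> Result<T, String> {
    r.map_err(|e| e.at(Some(loc)).to_display(unifier).to_string())
}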
@ -310,11 +313,9 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
|
|||||||
// if we can simply unify without creating new types...
|
// if we can simply unify without creating new types...
|
||||||
let mut fast_path = false;
|
let mut fast_path = false;
|
||||||
if let TypeEnum::TObj { fields, .. } = &*self.unifier.get_ty(ty) {
|
if let TypeEnum::TObj { fields, .. } = &*self.unifier.get_ty(ty) {
|
||||||
let fields = fields.borrow();
|
|
||||||
fast_path = true;
|
fast_path = true;
|
||||||
if let Some(enter) = fields.get(&"__enter__".into()).cloned() {
|
if let Some(enter) = fields.get(&"__enter__".into()).cloned() {
|
||||||
if let TypeEnum::TFunc(signature) = &*self.unifier.get_ty(enter.0) {
|
if let TypeEnum::TFunc(signature) = &*self.unifier.get_ty(enter.0) {
|
||||||
let signature = signature.borrow();
|
|
||||||
if !signature.args.is_empty() {
|
if !signature.args.is_empty() {
|
||||||
return report_error(
|
return report_error(
|
||||||
"__enter__ method should take no argument other than self",
|
"__enter__ method should take no argument other than self",
|
||||||
@ -343,7 +344,6 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
|
|||||||
}
|
}
|
||||||
if let Some(exit) = fields.get(&"__exit__".into()).cloned() {
|
if let Some(exit) = fields.get(&"__exit__".into()).cloned() {
|
||||||
if let TypeEnum::TFunc(signature) = &*self.unifier.get_ty(exit.0) {
|
if let TypeEnum::TFunc(signature) = &*self.unifier.get_ty(exit.0) {
|
||||||
let signature = signature.borrow();
|
|
||||||
if !signature.args.is_empty() {
|
if !signature.args.is_empty() {
|
||||||
return report_error(
|
return report_error(
|
||||||
"__exit__ method should take no argument other than self",
|
"__exit__ method should take no argument other than self",
|
||||||
@ -361,24 +361,24 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !fast_path {
|
if !fast_path {
|
||||||
let enter = TypeEnum::TFunc(RefCell::new(FunSignature {
|
let enter = TypeEnum::TFunc(FunSignature {
|
||||||
args: vec![],
|
args: vec![],
|
||||||
ret: item.optional_vars.as_ref().map_or_else(
|
ret: item.optional_vars.as_ref().map_or_else(
|
||||||
|| self.unifier.get_fresh_var().0,
|
|| self.unifier.get_dummy_var().0,
|
||||||
|var| var.custom.unwrap(),
|
|var| var.custom.unwrap(),
|
||||||
),
|
),
|
||||||
vars: Default::default(),
|
vars: Default::default(),
|
||||||
}));
|
});
|
||||||
let enter = self.unifier.add_ty(enter);
|
let enter = self.unifier.add_ty(enter);
|
||||||
let exit = TypeEnum::TFunc(RefCell::new(FunSignature {
|
let exit = TypeEnum::TFunc(FunSignature {
|
||||||
args: vec![],
|
args: vec![],
|
||||||
ret: self.unifier.get_fresh_var().0,
|
ret: self.unifier.get_dummy_var().0,
|
||||||
vars: Default::default(),
|
vars: Default::default(),
|
||||||
}));
|
});
|
||||||
let exit = self.unifier.add_ty(exit);
|
let exit = self.unifier.add_ty(exit);
|
||||||
let mut fields = HashMap::new();
|
let mut fields = HashMap::new();
|
||||||
fields.insert("__enter__".into(), (enter, false));
|
fields.insert("__enter__".into(), RecordField::new(enter, false, None));
|
||||||
fields.insert("__exit__".into(), (exit, false));
|
fields.insert("__exit__".into(), RecordField::new(exit, false, None));
|
||||||
let record = self.unifier.add_record(fields);
|
let record = self.unifier.add_record(fields);
|
||||||
self.unify(ty, record, &stmt.location)?;
|
self.unify(ty, record, &stmt.location)?;
|
||||||
}
|
}
|
||||||
@ -455,8 +455,8 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
|
|||||||
ast::ExprKind::Compare { left, ops, comparators } => {
|
ast::ExprKind::Compare { left, ops, comparators } => {
|
||||||
Some(self.infer_compare(left, ops, comparators)?)
|
Some(self.infer_compare(left, ops, comparators)?)
|
||||||
}
|
}
|
||||||
ast::ExprKind::Subscript { value, slice, .. } => {
|
ast::ExprKind::Subscript { value, slice, ctx, .. } => {
|
||||||
Some(self.infer_subscript(value.as_ref(), slice.as_ref())?)
|
Some(self.infer_subscript(value.as_ref(), slice.as_ref(), ctx)?)
|
||||||
}
|
}
|
||||||
ast::ExprKind::IfExp { test, body, orelse } => {
|
ast::ExprKind::IfExp { test, body, orelse } => {
|
||||||
Some(self.infer_if_expr(test, body.as_ref(), orelse.as_ref())?)
|
Some(self.infer_if_expr(test, body.as_ref(), orelse.as_ref())?)
|
||||||
@ -477,11 +477,11 @@ impl<'a> Inferencer<'a> {
|
|||||||
/// Constrain a <: b
|
/// Constrain a <: b
|
||||||
/// Currently implemented as unification
|
/// Currently implemented as unification
|
||||||
fn constrain(&mut self, a: Type, b: Type, location: &Location) -> Result<(), String> {
|
fn constrain(&mut self, a: Type, b: Type, location: &Location) -> Result<(), String> {
|
||||||
self.unifier.unify(a, b).map_err(|old| format!("{} at {}", old, location))
|
self.unify(a, b, location)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn unify(&mut self, a: Type, b: Type, location: &Location) -> Result<(), String> {
|
fn unify(&mut self, a: Type, b: Type, location: &Location) -> Result<(), String> {
|
||||||
self.unifier.unify(a, b).map_err(|old| format!("{} at {}", old, location))
|
self.unifier.unify(a, b).map_err(|e| e.at(Some(*location)).to_display(self.unifier).to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn infer_pattern(&mut self, pattern: &ast::Expr<()>) -> Result<(), String> {
|
fn infer_pattern(&mut self, pattern: &ast::Expr<()>) -> Result<(), String> {
|
||||||
@ -511,17 +511,17 @@ impl<'a> Inferencer<'a> {
|
|||||||
ret: Option<Type>,
|
ret: Option<Type>,
|
||||||
) -> InferenceResult {
|
) -> InferenceResult {
|
||||||
if let TypeEnum::TObj { params: class_params, fields, .. } = &*self.unifier.get_ty(obj) {
|
if let TypeEnum::TObj { params: class_params, fields, .. } = &*self.unifier.get_ty(obj) {
|
||||||
if class_params.borrow().is_empty() {
|
if class_params.is_empty() {
|
||||||
if let Some(ty) = fields.borrow().get(&method) {
|
if let Some(ty) = fields.get(&method) {
|
||||||
let ty = ty.0;
|
let ty = ty.0;
|
||||||
if let TypeEnum::TFunc(sign) = &*self.unifier.get_ty(ty) {
|
if let TypeEnum::TFunc(sign) = &*self.unifier.get_ty(ty) {
|
||||||
let sign = sign.borrow();
|
|
||||||
if sign.vars.is_empty() {
|
if sign.vars.is_empty() {
|
||||||
let call = Call {
|
let call = Call {
|
||||||
posargs: params,
|
posargs: params,
|
||||||
kwargs: HashMap::new(),
|
kwargs: HashMap::new(),
|
||||||
ret: sign.ret,
|
ret: sign.ret,
|
||||||
fun: RefCell::new(None),
|
fun: RefCell::new(None),
|
||||||
|
loc: Some(location),
|
||||||
};
|
};
|
||||||
if let Some(ret) = ret {
|
if let Some(ret) = ret {
|
||||||
self.unifier.unify(sign.ret, ret).unwrap();
|
self.unifier.unify(sign.ret, ret).unwrap();
|
||||||
@ -534,25 +534,26 @@ impl<'a> Inferencer<'a> {
|
|||||||
.rev()
|
.rev()
|
||||||
.collect();
|
.collect();
|
||||||
self.unifier
|
self.unifier
|
||||||
.unify_call(&call, ty, &sign, &required)
|
.unify_call(&call, ty, sign, &required)
|
||||||
.map_err(|old| format!("{} at {}", old, location))?;
|
.map_err(|e| e.at(Some(location)).to_display(self.unifier).to_string())?;
|
||||||
return Ok(sign.ret);
|
return Ok(sign.ret);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let ret = ret.unwrap_or_else(|| self.unifier.get_fresh_var().0);
|
let ret = ret.unwrap_or_else(|| self.unifier.get_dummy_var().0);
|
||||||
|
|
||||||
let call = self.unifier.add_call(Call {
|
let call = self.unifier.add_call(Call {
|
||||||
posargs: params,
|
posargs: params,
|
||||||
kwargs: HashMap::new(),
|
kwargs: HashMap::new(),
|
||||||
ret,
|
ret,
|
||||||
fun: RefCell::new(None),
|
fun: RefCell::new(None),
|
||||||
|
loc: Some(location),
|
||||||
});
|
});
|
||||||
self.calls.insert(location.into(), call);
|
self.calls.insert(location.into(), call);
|
||||||
let call = self.unifier.add_ty(TypeEnum::TCall(vec![call].into()));
|
let call = self.unifier.add_ty(TypeEnum::TCall(vec![call]));
|
||||||
let fields = once((method, (call, false))).collect();
|
let fields = once((method.into(), RecordField::new(call, false, Some(location)))).collect();
|
||||||
let record = self.unifier.add_record(fields);
|
let record = self.unifier.add_record(fields);
|
||||||
self.constrain(obj, record, &location)?;
|
self.constrain(obj, record, &location)?;
|
||||||
Ok(ret)
|
Ok(ret)
|
||||||
@ -585,10 +586,10 @@ impl<'a> Inferencer<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
let fn_args: Vec<_> =
|
let fn_args: Vec<_> =
|
||||||
args.args.iter().map(|v| (v.node.arg, self.unifier.get_fresh_var().0)).collect();
|
args.args.iter().map(|v| (v.node.arg, self.unifier.get_fresh_var(Some(v.node.arg), Some(v.location)).0)).collect();
|
||||||
let mut variable_mapping = self.variable_mapping.clone();
|
let mut variable_mapping = self.variable_mapping.clone();
|
||||||
variable_mapping.extend(fn_args.iter().cloned());
|
variable_mapping.extend(fn_args.iter().cloned());
|
||||||
let ret = self.unifier.get_fresh_var().0;
|
let ret = self.unifier.get_dummy_var().0;
|
||||||
|
|
||||||
let mut new_context = Inferencer {
|
let mut new_context = Inferencer {
|
||||||
function_data: self.function_data,
|
function_data: self.function_data,
|
||||||
@ -620,7 +621,7 @@ impl<'a> Inferencer<'a> {
|
|||||||
Ok(Located {
|
Ok(Located {
|
||||||
location,
|
location,
|
||||||
node: ExprKind::Lambda { args: args.into(), body: body.into() },
|
node: ExprKind::Lambda { args: args.into(), body: body.into() },
|
||||||
custom: Some(self.unifier.add_ty(TypeEnum::TFunc(fun.into()))),
|
custom: Some(self.unifier.add_ty(TypeEnum::TFunc(fun))),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -725,7 +726,7 @@ impl<'a> Inferencer<'a> {
|
|||||||
&arg,
|
&arg,
|
||||||
)?
|
)?
|
||||||
} else {
|
} else {
|
||||||
self.unifier.get_fresh_var().0
|
self.unifier.get_dummy_var().0
|
||||||
};
|
};
|
||||||
self.virtual_checks.push((arg0.custom.unwrap(), ty, func_location));
|
self.virtual_checks.push((arg0.custom.unwrap(), ty, func_location));
|
||||||
let custom = Some(self.unifier.add_ty(TypeEnum::TVirtual { ty }));
|
let custom = Some(self.unifier.add_ty(TypeEnum::TVirtual { ty }));
|
||||||
@ -774,7 +775,6 @@ impl<'a> Inferencer<'a> {
|
|||||||
.collect::<Result<Vec<_>, _>>()?;
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
if let TypeEnum::TFunc(sign) = &*self.unifier.get_ty(func.custom.unwrap()) {
|
if let TypeEnum::TFunc(sign) = &*self.unifier.get_ty(func.custom.unwrap()) {
|
||||||
let sign = sign.borrow();
|
|
||||||
if sign.vars.is_empty() {
|
if sign.vars.is_empty() {
|
||||||
let call = Call {
|
let call = Call {
|
||||||
posargs: args.iter().map(|v| v.custom.unwrap()).collect(),
|
posargs: args.iter().map(|v| v.custom.unwrap()).collect(),
|
||||||
@ -784,6 +784,7 @@ impl<'a> Inferencer<'a> {
|
|||||||
.collect(),
|
.collect(),
|
||||||
fun: RefCell::new(None),
|
fun: RefCell::new(None),
|
||||||
ret: sign.ret,
|
ret: sign.ret,
|
||||||
|
loc: Some(location)
|
||||||
};
|
};
|
||||||
let required: Vec<_> = sign
|
let required: Vec<_> = sign
|
||||||
.args
|
.args
|
||||||
@ -793,8 +794,8 @@ impl<'a> Inferencer<'a> {
|
|||||||
.rev()
|
.rev()
|
||||||
.collect();
|
.collect();
|
||||||
self.unifier
|
self.unifier
|
||||||
.unify_call(&call, func.custom.unwrap(), &sign, &required)
|
.unify_call(&call, func.custom.unwrap(), sign, &required)
|
||||||
.map_err(|old| format!("{} at {}", old, location))?;
|
.map_err(|e| e.at(Some(location)).to_display(self.unifier).to_string())?;
|
||||||
return Ok(Located {
|
return Ok(Located {
|
||||||
location,
|
location,
|
||||||
custom: Some(sign.ret),
|
custom: Some(sign.ret),
|
||||||
@ -803,7 +804,7 @@ impl<'a> Inferencer<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let ret = self.unifier.get_fresh_var().0;
|
let ret = self.unifier.get_dummy_var().0;
|
||||||
let call = self.unifier.add_call(Call {
|
let call = self.unifier.add_call(Call {
|
||||||
posargs: args.iter().map(|v| v.custom.unwrap()).collect(),
|
posargs: args.iter().map(|v| v.custom.unwrap()).collect(),
|
||||||
kwargs: keywords
|
kwargs: keywords
|
||||||
@ -812,9 +813,10 @@ impl<'a> Inferencer<'a> {
|
|||||||
.collect(),
|
.collect(),
|
||||||
fun: RefCell::new(None),
|
fun: RefCell::new(None),
|
||||||
ret,
|
ret,
|
||||||
|
loc: Some(location)
|
||||||
});
|
});
|
||||||
self.calls.insert(location.into(), call);
|
self.calls.insert(location.into(), call);
|
||||||
let call = self.unifier.add_ty(TypeEnum::TCall(vec![call].into()));
|
let call = self.unifier.add_ty(TypeEnum::TCall(vec![call]));
|
||||||
self.unify(func.custom.unwrap(), call, &func.location)?;
|
self.unify(func.custom.unwrap(), call, &func.location)?;
|
||||||
|
|
||||||
Ok(Located { location, custom: Some(ret), node: ExprKind::Call { func, args, keywords } })
|
Ok(Located { location, custom: Some(ret), node: ExprKind::Call { func, args, keywords } })
|
||||||
@ -831,7 +833,7 @@ impl<'a> Inferencer<'a> {
|
|||||||
.resolver
|
.resolver
|
||||||
.get_symbol_type(unifier, &self.top_level.definitions.read(), self.primitives, id)
|
.get_symbol_type(unifier, &self.top_level.definitions.read(), self.primitives, id)
|
||||||
.unwrap_or_else(|_| {
|
.unwrap_or_else(|_| {
|
||||||
let ty = unifier.get_fresh_var().0;
|
let ty = unifier.get_dummy_var().0;
|
||||||
variable_mapping.insert(id, ty);
|
variable_mapping.insert(id, ty);
|
||||||
ty
|
ty
|
||||||
}))
|
}))
|
||||||
@ -867,7 +869,7 @@ impl<'a> Inferencer<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn infer_list(&mut self, elts: &[ast::Expr<Option<Type>>]) -> InferenceResult {
|
fn infer_list(&mut self, elts: &[ast::Expr<Option<Type>>]) -> InferenceResult {
|
||||||
let (ty, _) = self.unifier.get_fresh_var();
|
let ty = self.unifier.get_dummy_var().0;
|
||||||
for t in elts.iter() {
|
for t in elts.iter() {
|
||||||
self.unify(ty, t.custom.unwrap(), &t.location)?;
|
self.unify(ty, t.custom.unwrap(), &t.location)?;
|
||||||
}
|
}
|
||||||
@ -888,7 +890,6 @@ impl<'a> Inferencer<'a> {
|
|||||||
let ty = value.custom.unwrap();
|
let ty = value.custom.unwrap();
|
||||||
if let TypeEnum::TObj { fields, .. } = &*self.unifier.get_ty(ty) {
|
if let TypeEnum::TObj { fields, .. } = &*self.unifier.get_ty(ty) {
|
||||||
// just a fast path
|
// just a fast path
|
||||||
let fields = fields.borrow();
|
|
||||||
match (fields.get(&attr), ctx == &ExprContext::Store) {
|
match (fields.get(&attr), ctx == &ExprContext::Store) {
|
||||||
(Some((ty, true)), _) => Ok(*ty),
|
(Some((ty, true)), _) => Ok(*ty),
|
||||||
(Some((ty, false)), false) => Ok(*ty),
|
(Some((ty, false)), false) => Ok(*ty),
|
||||||
@ -898,8 +899,9 @@ impl<'a> Inferencer<'a> {
|
|||||||
(None, _) => report_error(&format!("No such field {}", attr), value.location),
|
(None, _) => report_error(&format!("No such field {}", attr), value.location),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
let (attr_ty, _) = self.unifier.get_fresh_var();
|
let attr_ty = self.unifier.get_dummy_var().0;
|
||||||
let fields = once((attr, (attr_ty, ctx == &ExprContext::Store))).collect();
|
let fields = once((attr.into(), RecordField::new(
|
||||||
|
attr_ty, ctx == &ExprContext::Store, Some(value.location)))).collect();
|
||||||
let record = self.unifier.add_record(fields);
|
let record = self.unifier.add_record(fields);
|
||||||
self.constrain(value.custom.unwrap(), record, &value.location)?;
|
self.constrain(value.custom.unwrap(), record, &value.location)?;
|
||||||
Ok(attr_ty)
|
Ok(attr_ty)
|
||||||
@ -965,8 +967,9 @@ impl<'a> Inferencer<'a> {
|
|||||||
&mut self,
|
&mut self,
|
||||||
value: &ast::Expr<Option<Type>>,
|
value: &ast::Expr<Option<Type>>,
|
||||||
slice: &ast::Expr<Option<Type>>,
|
slice: &ast::Expr<Option<Type>>,
|
||||||
|
ctx: &ExprContext,
|
||||||
) -> InferenceResult {
|
) -> InferenceResult {
|
||||||
let ty = self.unifier.get_fresh_var().0;
|
let ty = self.unifier.get_dummy_var().0;
|
||||||
match &slice.node {
|
match &slice.node {
|
||||||
ast::ExprKind::Slice { lower, upper, step } => {
|
ast::ExprKind::Slice { lower, upper, step } => {
|
||||||
for v in [lower.as_ref(), upper.as_ref(), step.as_ref()].iter().flatten() {
|
for v in [lower.as_ref(), upper.as_ref(), step.as_ref()].iter().flatten() {
|
||||||
@ -983,8 +986,9 @@ impl<'a> Inferencer<'a> {
|
|||||||
None => None,
|
None => None,
|
||||||
};
|
};
|
||||||
let ind = ind.ok_or_else(|| "Index must be int32".to_string())?;
|
let ind = ind.ok_or_else(|| "Index must be int32".to_string())?;
|
||||||
let map = once((ind, ty)).collect();
|
let map = once((ind.into(), RecordField::new(
|
||||||
let seq = self.unifier.add_sequence(map);
|
ty, ctx == &ExprContext::Store, Some(value.location)))).collect();
|
||||||
|
let seq = self.unifier.add_record(map);
|
||||||
self.constrain(value.custom.unwrap(), seq, &value.location)?;
|
self.constrain(value.custom.unwrap(), seq, &value.location)?;
|
||||||
Ok(ty)
|
Ok(ty)
|
||||||
}
|
}
|
||||||
@ -1005,9 +1009,7 @@ impl<'a> Inferencer<'a> {
|
|||||||
orelse: &ast::Expr<Option<Type>>,
|
orelse: &ast::Expr<Option<Type>>,
|
||||||
) -> InferenceResult {
|
) -> InferenceResult {
|
||||||
self.constrain(test.custom.unwrap(), self.primitives.bool, &test.location)?;
|
self.constrain(test.custom.unwrap(), self.primitives.bool, &test.location)?;
|
||||||
let ty = self.unifier.get_fresh_var().0;
|
self.constrain(body.custom.unwrap(), orelse.custom.unwrap(), &body.location)?;
|
||||||
self.constrain(body.custom.unwrap(), ty, &body.location)?;
|
Ok(body.custom.unwrap())
|
||||||
self.constrain(orelse.custom.unwrap(), ty, &orelse.location)?;
|
|
||||||
Ok(ty)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
use super::super::typedef::*;
|
use super::super::{typedef::*, magic_methods::with_fields};
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::{
|
use crate::{
|
||||||
codegen::CodeGenContext,
|
codegen::CodeGenContext,
|
||||||
@ -40,8 +40,8 @@ impl SymbolResolver for Resolver {
|
|||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_identifier_def(&self, id: StrRef) -> Option<DefinitionId> {
|
fn get_identifier_def(&self, id: StrRef) -> Result<DefinitionId, String> {
|
||||||
self.id_to_def.get(&id).cloned()
|
self.id_to_def.get(&id).cloned().ok_or("Unknown identifier".to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_string_id(&self, _: &str) -> i32 {
|
fn get_string_id(&self, _: &str) -> i32 {
|
||||||
@ -69,7 +69,7 @@ impl TestEnvironment {
|
|||||||
fields: HashMap::new().into(),
|
fields: HashMap::new().into(),
|
||||||
params: HashMap::new().into(),
|
params: HashMap::new().into(),
|
||||||
});
|
});
|
||||||
if let TypeEnum::TObj { fields, .. } = &*unifier.get_ty(int32) {
|
with_fields(&mut unifier, int32, |unifier, fields| {
|
||||||
let add_ty = unifier.add_ty(TypeEnum::TFunc(
|
let add_ty = unifier.add_ty(TypeEnum::TFunc(
|
||||||
FunSignature {
|
FunSignature {
|
||||||
args: vec![FuncArg { name: "other".into(), ty: int32, default_value: None }],
|
args: vec![FuncArg { name: "other".into(), ty: int32, default_value: None }],
|
||||||
@ -78,8 +78,8 @@ impl TestEnvironment {
|
|||||||
}
|
}
|
||||||
.into(),
|
.into(),
|
||||||
));
|
));
|
||||||
fields.borrow_mut().insert("__add__".into(), (add_ty, false));
|
fields.insert("__add__".into(), (add_ty, false));
|
||||||
}
|
});
|
||||||
let int64 = unifier.add_ty(TypeEnum::TObj {
|
let int64 = unifier.add_ty(TypeEnum::TObj {
|
||||||
obj_id: DefinitionId(1),
|
obj_id: DefinitionId(1),
|
||||||
fields: HashMap::new().into(),
|
fields: HashMap::new().into(),
|
||||||
@ -170,7 +170,7 @@ impl TestEnvironment {
|
|||||||
fields: HashMap::new().into(),
|
fields: HashMap::new().into(),
|
||||||
params: HashMap::new().into(),
|
params: HashMap::new().into(),
|
||||||
});
|
});
|
||||||
if let TypeEnum::TObj { fields, .. } = &*unifier.get_ty(int32) {
|
with_fields(&mut unifier, int32, |unifier, fields| {
|
||||||
let add_ty = unifier.add_ty(TypeEnum::TFunc(
|
let add_ty = unifier.add_ty(TypeEnum::TFunc(
|
||||||
FunSignature {
|
FunSignature {
|
||||||
args: vec![FuncArg { name: "other".into(), ty: int32, default_value: None }],
|
args: vec![FuncArg { name: "other".into(), ty: int32, default_value: None }],
|
||||||
@ -179,8 +179,8 @@ impl TestEnvironment {
|
|||||||
}
|
}
|
||||||
.into(),
|
.into(),
|
||||||
));
|
));
|
||||||
fields.borrow_mut().insert("__add__".into(), (add_ty, false));
|
fields.insert("__add__".into(), (add_ty, false));
|
||||||
}
|
});
|
||||||
let int64 = unifier.add_ty(TypeEnum::TObj {
|
let int64 = unifier.add_ty(TypeEnum::TObj {
|
||||||
obj_id: DefinitionId(1),
|
obj_id: DefinitionId(1),
|
||||||
fields: HashMap::new().into(),
|
fields: HashMap::new().into(),
|
||||||
@ -230,6 +230,7 @@ impl TestEnvironment {
|
|||||||
ancestors: Default::default(),
|
ancestors: Default::default(),
|
||||||
resolver: None,
|
resolver: None,
|
||||||
constructor: None,
|
constructor: None,
|
||||||
|
loc: None
|
||||||
})
|
})
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
@ -238,7 +239,7 @@ impl TestEnvironment {
|
|||||||
|
|
||||||
let primitives = PrimitiveStore { int32, int64, float, bool, none, range, str, exception };
|
let primitives = PrimitiveStore { int32, int64, float, bool, none, range, str, exception };
|
||||||
|
|
||||||
let (v0, id) = unifier.get_fresh_var();
|
let (v0, id) = unifier.get_dummy_var();
|
||||||
|
|
||||||
let foo_ty = unifier.add_ty(TypeEnum::TObj {
|
let foo_ty = unifier.add_ty(TypeEnum::TObj {
|
||||||
obj_id: DefinitionId(defs + 1),
|
obj_id: DefinitionId(defs + 1),
|
||||||
@ -255,6 +256,7 @@ impl TestEnvironment {
|
|||||||
ancestors: Default::default(),
|
ancestors: Default::default(),
|
||||||
resolver: None,
|
resolver: None,
|
||||||
constructor: None,
|
constructor: None,
|
||||||
|
loc: None,
|
||||||
})
|
})
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
@ -293,6 +295,7 @@ impl TestEnvironment {
|
|||||||
ancestors: Default::default(),
|
ancestors: Default::default(),
|
||||||
resolver: None,
|
resolver: None,
|
||||||
constructor: None,
|
constructor: None,
|
||||||
|
loc: None
|
||||||
})
|
})
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
@ -322,6 +325,7 @@ impl TestEnvironment {
|
|||||||
ancestors: Default::default(),
|
ancestors: Default::default(),
|
||||||
resolver: None,
|
resolver: None,
|
||||||
constructor: None,
|
constructor: None,
|
||||||
|
loc: None
|
||||||
})
|
})
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
@ -416,7 +420,7 @@ impl TestEnvironment {
|
|||||||
c = 1.234
|
c = 1.234
|
||||||
d = b(c)
|
d = b(c)
|
||||||
"},
|
"},
|
||||||
[("a", "fn[[x=float, y=float], float]"), ("b", "fn[[x=float], float]"), ("c", "float"), ("d", "float")].iter().cloned().collect(),
|
[("a", "fn[[x:float, y:float], float]"), ("b", "fn[[x:float], float]"), ("c", "float"), ("d", "float")].iter().cloned().collect(),
|
||||||
&[]
|
&[]
|
||||||
; "lambda test")]
|
; "lambda test")]
|
||||||
#[test_case(indoc! {"
|
#[test_case(indoc! {"
|
||||||
@ -425,7 +429,7 @@ impl TestEnvironment {
|
|||||||
a = b
|
a = b
|
||||||
c = b(1)
|
c = b(1)
|
||||||
"},
|
"},
|
||||||
[("a", "fn[[x=int32], int32]"), ("b", "fn[[x=int32], int32]"), ("c", "int32")].iter().cloned().collect(),
|
[("a", "fn[[x:int32], int32]"), ("b", "fn[[x:int32], int32]"), ("c", "int32")].iter().cloned().collect(),
|
||||||
&[]
|
&[]
|
||||||
; "lambda test 2")]
|
; "lambda test 2")]
|
||||||
#[test_case(indoc! {"
|
#[test_case(indoc! {"
|
||||||
@ -441,8 +445,8 @@ impl TestEnvironment {
|
|||||||
b(123)
|
b(123)
|
||||||
|
|
||||||
"},
|
"},
|
||||||
[("a", "fn[[x=bool], bool]"), ("b", "fn[[x=int32], int32]"), ("c", "bool"),
|
[("a", "fn[[x:bool], bool]"), ("b", "fn[[x:int32], int32]"), ("c", "bool"),
|
||||||
("d", "int32"), ("foo1", "Foo[1->bool]"), ("foo2", "Foo[1->int32]")].iter().cloned().collect(),
|
("d", "int32"), ("foo1", "Foo[bool]"), ("foo2", "Foo[int32]")].iter().cloned().collect(),
|
||||||
&[]
|
&[]
|
||||||
; "obj test")]
|
; "obj test")]
|
||||||
#[test_case(indoc! {"
|
#[test_case(indoc! {"
|
||||||
@ -485,33 +489,37 @@ fn test_basic(source: &str, mapping: HashMap<&str, &str>, virtuals: &[(&str, &st
|
|||||||
inferencer.check_block(&statements, &mut defined_identifiers).unwrap();
|
inferencer.check_block(&statements, &mut defined_identifiers).unwrap();
|
||||||
|
|
||||||
for (k, v) in inferencer.variable_mapping.iter() {
|
for (k, v) in inferencer.variable_mapping.iter() {
|
||||||
let name = inferencer.unifier.stringify(
|
let name = inferencer.unifier.internal_stringify(
|
||||||
*v,
|
*v,
|
||||||
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
||||||
&mut |v| format!("v{}", v),
|
&mut |v| format!("v{}", v),
|
||||||
|
&mut None
|
||||||
);
|
);
|
||||||
println!("{}: {}", k, name);
|
println!("{}: {}", k, name);
|
||||||
}
|
}
|
||||||
for (k, v) in mapping.iter() {
|
for (k, v) in mapping.iter() {
|
||||||
let ty = inferencer.variable_mapping.get(&(*k).into()).unwrap();
|
let ty = inferencer.variable_mapping.get(&(*k).into()).unwrap();
|
||||||
let name = inferencer.unifier.stringify(
|
let name = inferencer.unifier.internal_stringify(
|
||||||
*ty,
|
*ty,
|
||||||
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
||||||
&mut |v| format!("v{}", v),
|
&mut |v| format!("v{}", v),
|
||||||
|
&mut None
|
||||||
);
|
);
|
||||||
assert_eq!(format!("{}: {}", k, v), format!("{}: {}", k, name));
|
assert_eq!(format!("{}: {}", k, v), format!("{}: {}", k, name));
|
||||||
}
|
}
|
||||||
assert_eq!(inferencer.virtual_checks.len(), virtuals.len());
|
assert_eq!(inferencer.virtual_checks.len(), virtuals.len());
|
||||||
for ((a, b, _), (x, y)) in zip(inferencer.virtual_checks.iter(), virtuals) {
|
for ((a, b, _), (x, y)) in zip(inferencer.virtual_checks.iter(), virtuals) {
|
||||||
let a = inferencer.unifier.stringify(
|
let a = inferencer.unifier.internal_stringify(
|
||||||
*a,
|
*a,
|
||||||
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
||||||
&mut |v| format!("v{}", v),
|
&mut |v| format!("v{}", v),
|
||||||
|
&mut None
|
||||||
);
|
);
|
||||||
let b = inferencer.unifier.stringify(
|
let b = inferencer.unifier.internal_stringify(
|
||||||
*b,
|
*b,
|
||||||
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
||||||
&mut |v| format!("v{}", v),
|
&mut |v| format!("v{}", v),
|
||||||
|
&mut None
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(&a, x);
|
assert_eq!(&a, x);
|
||||||
@ -627,19 +635,21 @@ fn test_primitive_magic_methods(source: &str, mapping: HashMap<&str, &str>) {
|
|||||||
inferencer.check_block(&statements, &mut defined_identifiers).unwrap();
|
inferencer.check_block(&statements, &mut defined_identifiers).unwrap();
|
||||||
|
|
||||||
for (k, v) in inferencer.variable_mapping.iter() {
|
for (k, v) in inferencer.variable_mapping.iter() {
|
||||||
let name = inferencer.unifier.stringify(
|
let name = inferencer.unifier.internal_stringify(
|
||||||
*v,
|
*v,
|
||||||
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
||||||
&mut |v| format!("v{}", v),
|
&mut |v| format!("v{}", v),
|
||||||
|
&mut None
|
||||||
);
|
);
|
||||||
println!("{}: {}", k, name);
|
println!("{}: {}", k, name);
|
||||||
}
|
}
|
||||||
for (k, v) in mapping.iter() {
|
for (k, v) in mapping.iter() {
|
||||||
let ty = inferencer.variable_mapping.get(&(*k).into()).unwrap();
|
let ty = inferencer.variable_mapping.get(&(*k).into()).unwrap();
|
||||||
let name = inferencer.unifier.stringify(
|
let name = inferencer.unifier.internal_stringify(
|
||||||
*ty,
|
*ty,
|
||||||
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
&mut |v| (*id_to_name.get(&v).unwrap()).into(),
|
||||||
&mut |v| format!("v{}", v),
|
&mut |v| format!("v{}", v),
|
||||||
|
&mut None
|
||||||
);
|
);
|
||||||
assert_eq!(format!("{}: {}", k, v), format!("{}: {}", k, name));
|
assert_eq!(format!("{}: {}", k, v), format!("{}: {}", k, name));
|
||||||
}
|
}
|
||||||
|
@ -1,13 +1,15 @@
|
|||||||
use itertools::{zip, Itertools};
|
use itertools::{zip, Itertools};
|
||||||
use std::cell::RefCell;
|
use std::cell::RefCell;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
use std::fmt::Display;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
use std::sync::{Arc, Mutex};
|
use std::sync::{Arc, Mutex};
|
||||||
use std::{borrow::Cow, collections::HashSet};
|
use std::{borrow::Cow, collections::HashSet};
|
||||||
|
|
||||||
use nac3parser::ast::StrRef;
|
use nac3parser::ast::{StrRef, Location};
|
||||||
|
|
||||||
use super::unification_table::{UnificationKey, UnificationTable};
|
use super::unification_table::{UnificationKey, UnificationTable};
|
||||||
|
use super::type_error::{TypeError, TypeErrorKind};
|
||||||
use crate::symbol_resolver::SymbolValue;
|
use crate::symbol_resolver::SymbolValue;
|
||||||
use crate::toplevel::{DefinitionId, TopLevelContext, TopLevelDef};
|
use crate::toplevel::{DefinitionId, TopLevelContext, TopLevelDef};
|
||||||
|
|
||||||
@ -18,7 +20,7 @@ mod test;
|
|||||||
pub type Type = UnificationKey;
|
pub type Type = UnificationKey;
|
||||||
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
pub struct CallId(usize);
|
pub struct CallId(pub(super) usize);
|
||||||
|
|
||||||
pub type Mapping<K, V = Type> = HashMap<K, V>;
|
pub type Mapping<K, V = Type> = HashMap<K, V>;
|
||||||
type VarMap = Mapping<u32>;
|
type VarMap = Mapping<u32>;
|
||||||
@ -29,6 +31,7 @@ pub struct Call {
|
|||||||
pub kwargs: HashMap<StrRef, Type>,
|
pub kwargs: HashMap<StrRef, Type>,
|
||||||
pub ret: Type,
|
pub ret: Type,
|
||||||
pub fun: RefCell<Option<Type>>,
|
pub fun: RefCell<Option<Type>>,
|
||||||
|
pub loc: Option<Location>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
@ -45,23 +48,76 @@ pub struct FunSignature {
    pub vars: VarMap,
}

#[derive(Clone)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TypeVarMeta {
pub enum RecordKey {
    Generic,
    Str(StrRef),
    Sequence(RefCell<Mapping<i32>>),
    Int(i32)
    Record(RefCell<Mapping<StrRef, (Type, bool)>>),
}

impl From<&RecordKey> for StrRef {
    fn from(r: &RecordKey) -> Self {
        match r {
            RecordKey::Str(s) => *s,
            RecordKey::Int(i) => StrRef::from(i.to_string())
        }
    }
}

impl From<StrRef> for RecordKey {
    fn from(s: StrRef) -> Self {
        RecordKey::Str(s)
    }
}

impl From<&str> for RecordKey {
    fn from(s: &str) -> Self {
        RecordKey::Str(s.into())
    }
}

impl From<i32> for RecordKey {
    fn from(i: i32) -> Self {
        RecordKey::Int(i)
    }
}

impl Display for RecordKey {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            RecordKey::Str(s) => write!(f, "{}", s),
            RecordKey::Int(i) => write!(f, "{}", i)
        }
    }
}

#[derive(Debug, Clone, Copy)]
pub struct RecordField {
    ty: Type,
    mutable: bool,
    loc: Option<Location>
}

impl RecordField {
    pub fn new(ty: Type, mutable: bool, loc: Option<Location>) -> RecordField {
        RecordField { ty, mutable, loc }
    }
}

#[derive(Clone)]
pub enum TypeEnum {
    TRigidVar {
        id: u32,
        name: Option<StrRef>,
        loc: Option<Location>
    },
    TVar {
        id: u32,
        meta: TypeVarMeta,
        // empty indicates this is not a struct/tuple/list
        fields: Option<Mapping<RecordKey, RecordField>>,
        // empty indicates no restriction
        range: RefCell<Vec<Type>>,
        range: Vec<Type>,
        name: Option<StrRef>,
        loc: Option<Location>
    },
    TTuple {
        ty: Vec<Type>,
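A brief illustration of how the conversions above are meant to be used (illustrative values only; "__add__" and 0 are arbitrary keys, not taken from the commit):

// Attribute names and tuple/list indices now share a single key space.
let by_name: RecordKey = "__add__".into(); // RecordKey::Str via From<&str>
let by_index: RecordKey = 0.into();        // RecordKey::Int via From<i32>
// Both print naturally in diagnostics through the Display impl.
println!("missing fields: {}, {}", by_name, by_index);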
@ -71,14 +127,14 @@ pub enum TypeEnum {
    },
    TObj {
        obj_id: DefinitionId,
        fields: RefCell<Mapping<StrRef, (Type, bool)>>,
        fields: Mapping<StrRef, (Type, bool)>,
        params: RefCell<VarMap>,
        params: VarMap,
    },
    TVirtual {
        ty: Type,
    },
    TCall(RefCell<Vec<CallId>>),
    TCall(Vec<CallId>),
    TFunc(RefCell<FunSignature>),
    TFunc(FunSignature),
}

impl TypeEnum {
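As a rough sketch of how the inferencer drives these definitions (it mirrors the infer_attribute changes earlier in this commit; the helper function, the field name "foo" and its arguments are placeholders, not part of the commit):

// Sketch: an access such as `obj.foo` becomes a record constraint on obj's type.
// `once` is std::iter::once; `Mapping` is the HashMap alias defined in this module.
fn constrain_attr(unifier: &mut Unifier, obj_ty: Type, attr_ty: Type, loc: Location) -> Result<(), String> {
    let fields: Mapping<RecordKey, RecordField> =
        once((RecordKey::from("foo"), RecordField::new(attr_ty, false, Some(loc)))).collect();
    let record = unifier.add_record(fields);
    // Unification either resolves the field or yields a located TypeError
    // (e.g. NoSuchField or MutationError) that callers render as shown above.
    unifier.unify(obj_ty, record)
        .map_err(|e| e.at(Some(loc)).to_display(unifier).to_string())
}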
@ -102,7 +158,7 @@ pub type SharedUnifier = Arc<Mutex<(UnificationTable<TypeEnum>, u32, Vec<Call>)>
|
|||||||
pub struct Unifier {
|
pub struct Unifier {
|
||||||
pub top_level: Option<Arc<TopLevelContext>>,
|
pub top_level: Option<Arc<TopLevelContext>>,
|
||||||
unification_table: UnificationTable<Rc<TypeEnum>>,
|
unification_table: UnificationTable<Rc<TypeEnum>>,
|
||||||
calls: Vec<Rc<Call>>,
|
pub(super) calls: Vec<Rc<Call>>,
|
||||||
var_id: u32,
|
var_id: u32,
|
||||||
unify_cache: HashSet<(Type, Type)>,
|
unify_cache: HashSet<(Type, Type)>,
|
||||||
}
|
}
|
||||||
@ -125,6 +181,10 @@ impl Unifier {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub unsafe fn get_unification_table(&mut self) -> &mut UnificationTable<Rc<TypeEnum>> {
|
||||||
|
&mut self.unification_table
|
||||||
|
}
|
||||||
|
|
||||||
/// Determine if the two types are the same
|
/// Determine if the two types are the same
|
||||||
pub fn unioned(&mut self, a: Type, b: Type) -> bool {
|
pub fn unioned(&mut self, a: Type, b: Type) -> bool {
|
||||||
self.unification_table.unioned(a, b)
|
self.unification_table.unioned(a, b)
|
||||||
@ -155,13 +215,15 @@ impl Unifier {
|
|||||||
self.unification_table.new_key(Rc::new(a))
|
self.unification_table.new_key(Rc::new(a))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn add_record(&mut self, fields: Mapping<StrRef, (Type, bool)>) -> Type {
|
pub fn add_record(&mut self, fields: Mapping<RecordKey, RecordField>) -> Type {
|
||||||
let id = self.var_id + 1;
|
let id = self.var_id + 1;
|
||||||
self.var_id += 1;
|
self.var_id += 1;
|
||||||
self.add_ty(TypeEnum::TVar {
|
self.add_ty(TypeEnum::TVar {
|
||||||
id,
|
id,
|
||||||
range: vec![].into(),
|
range: vec![],
|
||||||
meta: TypeVarMeta::Record(fields.into()),
|
fields: Some(fields),
|
||||||
|
name: None,
|
||||||
|
loc: None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -174,7 +236,16 @@ impl Unifier {
|
|||||||
pub fn get_call_signature(&mut self, id: CallId) -> Option<FunSignature> {
|
pub fn get_call_signature(&mut self, id: CallId) -> Option<FunSignature> {
|
||||||
let fun = self.calls.get(id.0).unwrap().fun.borrow().unwrap();
|
let fun = self.calls.get(id.0).unwrap().fun.borrow().unwrap();
|
||||||
if let TypeEnum::TFunc(sign) = &*self.get_ty(fun) {
|
if let TypeEnum::TFunc(sign) = &*self.get_ty(fun) {
|
||||||
Some(sign.borrow().clone())
|
Some(sign.clone())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_call_signature_immutable(&self, id: CallId) -> Option<FunSignature> {
|
||||||
|
let fun = self.calls.get(id.0).unwrap().fun.borrow().unwrap();
|
||||||
|
if let TypeEnum::TFunc(sign) = &*self.get_ty_immutable(fun) {
|
||||||
|
Some(sign.clone())
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
@ -184,37 +255,35 @@ impl Unifier {
|
|||||||
self.unification_table.get_representative(ty)
|
self.unification_table.get_representative(ty)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn add_sequence(&mut self, sequence: Mapping<i32>) -> Type {
|
|
||||||
let id = self.var_id + 1;
|
|
||||||
self.var_id += 1;
|
|
||||||
self.add_ty(TypeEnum::TVar {
|
|
||||||
id,
|
|
||||||
range: vec![].into(),
|
|
||||||
meta: TypeVarMeta::Sequence(sequence.into()),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the TypeEnum of a type.
|
/// Get the TypeEnum of a type.
|
||||||
pub fn get_ty(&mut self, a: Type) -> Rc<TypeEnum> {
|
pub fn get_ty(&mut self, a: Type) -> Rc<TypeEnum> {
|
||||||
self.unification_table.probe_value(a).clone()
|
self.unification_table.probe_value(a).clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_fresh_rigid_var(&mut self) -> (Type, u32) {
|
pub fn get_ty_immutable(&self, a: Type) -> Rc<TypeEnum> {
|
||||||
let id = self.var_id + 1;
|
self.unification_table.probe_value_immutable(a).clone()
|
||||||
self.var_id += 1;
|
|
||||||
(self.add_ty(TypeEnum::TRigidVar { id }), id)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_fresh_var(&mut self) -> (Type, u32) {
|
pub fn get_fresh_rigid_var(&mut self, name: Option<StrRef>, loc: Option<Location>) -> (Type, u32) {
|
||||||
self.get_fresh_var_with_range(&[])
|
let id = self.var_id + 1;
|
||||||
|
self.var_id += 1;
|
||||||
|
(self.add_ty(TypeEnum::TRigidVar { id, name, loc }), id)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_dummy_var(&mut self) -> (Type, u32) {
|
||||||
|
self.get_fresh_var_with_range(&[], None, None)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_fresh_var(&mut self, name: Option<StrRef>, loc: Option<Location>) -> (Type, u32) {
|
||||||
|
self.get_fresh_var_with_range(&[], name, loc)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get a fresh type variable.
|
/// Get a fresh type variable.
|
||||||
pub fn get_fresh_var_with_range(&mut self, range: &[Type]) -> (Type, u32) {
|
pub fn get_fresh_var_with_range(&mut self, range: &[Type], name: Option<StrRef>, loc: Option<Location>) -> (Type, u32) {
|
||||||
let id = self.var_id + 1;
|
let id = self.var_id + 1;
|
||||||
self.var_id += 1;
|
self.var_id += 1;
|
||||||
let range = range.to_vec().into();
|
let range = range.to_vec();
|
||||||
(self.add_ty(TypeEnum::TVar { id, range, meta: TypeVarMeta::Generic }), id)
|
(self.add_ty(TypeEnum::TVar { id, range, fields: None, name, loc}), id)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Unification would not unify rigid variables with other types, but we want to do this for
|
/// Unification would not unify rigid variables with other types, but we want to do this for
|
||||||
@ -227,7 +296,6 @@ impl Unifier {
|
|||||||
pub fn get_instantiations(&mut self, ty: Type) -> Option<Vec<Type>> {
|
pub fn get_instantiations(&mut self, ty: Type) -> Option<Vec<Type>> {
|
||||||
match &*self.get_ty(ty) {
|
match &*self.get_ty(ty) {
|
||||||
TypeEnum::TVar { range, .. } => {
|
TypeEnum::TVar { range, .. } => {
|
||||||
let range = range.borrow();
|
|
||||||
if range.is_empty() {
|
if range.is_empty() {
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
@ -261,11 +329,10 @@ impl Unifier {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
TypeEnum::TObj { params, .. } => {
|
TypeEnum::TObj { params, .. } => {
|
||||||
let params = params.borrow();
|
let (keys, params): (Vec<u32>, Vec<Type>) = params.iter().unzip();
|
||||||
let (keys, params): (Vec<&u32>, Vec<&Type>) = params.iter().unzip();
|
|
||||||
let params = params
|
let params = params
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|ty| self.get_instantiations(*ty).unwrap_or_else(|| vec![*ty]))
|
.map(|ty| self.get_instantiations(ty).unwrap_or_else(|| vec![ty]))
|
||||||
.multi_cartesian_product()
|
.multi_cartesian_product()
|
||||||
.collect_vec();
|
.collect_vec();
|
||||||
if params.len() <= 1 {
|
if params.len() <= 1 {
|
||||||
@ -277,7 +344,7 @@ impl Unifier {
|
|||||||
.map(|params| {
|
.map(|params| {
|
||||||
self.subst(
|
self.subst(
|
||||||
ty,
|
ty,
|
||||||
&zip(keys.iter().cloned().cloned(), params.iter().cloned())
|
&zip(keys.iter().cloned(), params.iter().cloned())
|
||||||
.collect(),
|
.collect(),
|
||||||
)
|
)
|
||||||
.unwrap_or(ty)
|
.unwrap_or(ty)
|
||||||
@ -299,7 +366,7 @@ impl Unifier {
|
|||||||
TList { ty } => self.is_concrete(*ty, allowed_typevars),
|
TList { ty } => self.is_concrete(*ty, allowed_typevars),
|
||||||
TTuple { ty } => ty.iter().all(|ty| self.is_concrete(*ty, allowed_typevars)),
|
TTuple { ty } => ty.iter().all(|ty| self.is_concrete(*ty, allowed_typevars)),
|
||||||
TObj { params: vars, .. } => {
|
TObj { params: vars, .. } => {
|
||||||
vars.borrow().values().all(|ty| self.is_concrete(*ty, allowed_typevars))
|
vars.values().all(|ty| self.is_concrete(*ty, allowed_typevars))
|
||||||
}
|
}
|
||||||
// functions are instantiated for each call sites, so the function type can contain
|
// functions are instantiated for each call sites, so the function type can contain
|
||||||
// type variables.
|
// type variables.
|
||||||
@ -314,8 +381,8 @@ impl Unifier {
|
|||||||
b: Type,
|
b: Type,
|
||||||
signature: &FunSignature,
|
signature: &FunSignature,
|
||||||
required: &[StrRef],
|
required: &[StrRef],
|
||||||
) -> Result<(), String> {
|
) -> Result<(), TypeError> {
|
||||||
let Call { posargs, kwargs, ret, fun } = call;
|
let Call { posargs, kwargs, ret, fun, loc } = call;
|
||||||
let instantiated = self.instantiate_fun(b, &*signature);
|
let instantiated = self.instantiate_fun(b, &*signature);
|
||||||
let r = self.get_ty(instantiated);
|
let r = self.get_ty(instantiated);
|
||||||
let r = r.as_ref();
|
let r = r.as_ref();
|
||||||
@ -329,15 +396,22 @@ impl Unifier {
|
|||||||
// arguments) are provided, and do not provide the same argument twice.
|
// arguments) are provided, and do not provide the same argument twice.
|
||||||
let mut required = required.to_vec();
|
let mut required = required.to_vec();
|
||||||
let mut all_names: Vec<_> =
|
let mut all_names: Vec<_> =
|
||||||
signature.borrow().args.iter().map(|v| (v.name, v.ty)).rev().collect();
|
signature.args.iter().map(|v| (v.name, v.ty)).rev().collect();
|
||||||
for (i, t) in posargs.iter().enumerate() {
|
for (i, t) in posargs.iter().enumerate() {
|
||||||
if signature.borrow().args.len() <= i {
|
if signature.args.len() <= i {
|
||||||
return Err("Too many arguments.".to_string());
|
return Err(TypeError::new(TypeErrorKind::TooManyArguments{
|
||||||
|
expected: signature.args.len(),
|
||||||
|
got: i,
|
||||||
|
}, *loc));
|
||||||
}
|
}
|
||||||
if !required.is_empty() {
|
|
||||||
required.pop();
|
required.pop();
|
||||||
}
|
let (name, expected) = all_names.pop().unwrap();
|
||||||
self.unify_impl(all_names.pop().unwrap().1, *t, false)?;
|
self.unify_impl(expected, *t, false)
|
||||||
|
.map_err(|_| TypeError::new(TypeErrorKind::IncorrectArgType {
|
||||||
|
name,
|
||||||
|
expected,
|
||||||
|
got: *t,
|
||||||
|
}, *loc))?;
|
||||||
}
|
}
|
||||||
for (k, t) in kwargs.iter() {
|
for (k, t) in kwargs.iter() {
|
||||||
if let Some(i) = required.iter().position(|v| v == k) {
|
if let Some(i) = required.iter().position(|v| v == k) {
|
||||||
@ -346,18 +420,30 @@ impl Unifier {
|
|||||||
let i = all_names
|
let i = all_names
|
||||||
.iter()
|
.iter()
|
||||||
.position(|v| &v.0 == k)
|
.position(|v| &v.0 == k)
|
||||||
.ok_or_else(|| format!("Unknown keyword argument {}", k))?;
|
.ok_or_else(|| TypeError::new(TypeErrorKind::UnknownArgName(*k), *loc))?;
|
||||||
self.unify_impl(all_names.remove(i).1, *t, false)?;
|
let (name, expected) = all_names.remove(i);
|
||||||
|
self.unify_impl(expected, *t, false)
|
||||||
|
.map_err(|_| TypeError::new(TypeErrorKind::IncorrectArgType {
|
||||||
|
name,
|
||||||
|
expected,
|
||||||
|
got: *t,
|
||||||
|
}, *loc))?;
|
||||||
}
|
}
|
||||||
if !required.is_empty() {
|
if !required.is_empty() {
|
||||||
return Err("Expected more arguments".to_string());
|
return Err(TypeError::new(TypeErrorKind::MissingArgs(required.iter().join(", ")), *loc));
|
||||||
}
|
}
|
||||||
self.unify_impl(*ret, signature.borrow().ret, false)?;
|
self.unify_impl(*ret, signature.ret, false)
|
||||||
|
.map_err(|mut err| {
|
||||||
|
if err.loc.is_none() {
|
||||||
|
err.loc = *loc;
|
||||||
|
}
|
||||||
|
err
|
||||||
|
})?;
|
||||||
*fun.borrow_mut() = Some(instantiated);
|
*fun.borrow_mut() = Some(instantiated);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn unify(&mut self, a: Type, b: Type) -> Result<(), String> {
|
pub fn unify(&mut self, a: Type, b: Type) -> Result<(), TypeError> {
|
||||||
self.unify_cache.clear();
|
self.unify_cache.clear();
|
||||||
if self.unification_table.unioned(a, b) {
|
if self.unification_table.unioned(a, b) {
|
||||||
Ok(())
|
Ok(())
|
||||||
@ -366,9 +452,8 @@ impl Unifier {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn unify_impl(&mut self, a: Type, b: Type, swapped: bool) -> Result<(), String> {
|
fn unify_impl(&mut self, a: Type, b: Type, swapped: bool) -> Result<(), TypeError> {
|
||||||
use TypeEnum::*;
|
use TypeEnum::*;
|
||||||
use TypeVarMeta::*;
|
|
||||||
|
|
||||||
if !swapped {
|
if !swapped {
|
||||||
let rep_a = self.unification_table.get_representative(a);
|
let rep_a = self.unification_table.get_representative(a);
|
||||||
@ -386,62 +471,48 @@ impl Unifier {
|
|||||||
)
|
)
|
||||||
};
|
};
|
||||||
match (&*ty_a, &*ty_b) {
|
match (&*ty_a, &*ty_b) {
|
||||||
(TVar { meta: meta1, range: range1, .. }, TVar { meta: meta2, range: range2, .. }) => {
|
(TVar { fields: fields1, id, name: name1, loc: loc1, .. }, TVar { fields: fields2, name: name2, loc: loc2, .. }) => {
|
||||||
match (meta1, meta2) {
|
let new_fields = match (fields1, fields2) {
|
||||||
(Generic, _) => {}
|
(None, None) => None,
|
||||||
(_, Generic) => {
|
(None, Some(fields)) => Some(fields.clone()),
|
||||||
|
(_, None) => {
|
||||||
return self.unify_impl(b, a, true);
|
return self.unify_impl(b, a, true);
|
||||||
}
|
},
|
||||||
(Record(fields1), Record(fields2)) => {
|
(Some(fields1), Some(fields2)) => {
|
||||||
let mut fields2 = fields2.borrow_mut();
|
let mut new_fields: Mapping<_, _> = fields2.clone();
|
||||||
for (key, (ty, is_mutable)) in fields1.borrow().iter() {
|
for (key, val1) in fields1.iter() {
|
||||||
if let Some((ty2, is_mutable2)) = fields2.get_mut(key) {
|
if let Some(val2) = fields2.get(key) {
|
||||||
self.unify_impl(*ty2, *ty, false)?;
|
self.unify_impl(val1.ty, val2.ty, false)
|
||||||
*is_mutable2 |= *is_mutable;
|
.map_err(|_| TypeError::new(TypeErrorKind::FieldUnificationError {
|
||||||
|
field: *key,
|
||||||
|
types: (val1.ty, val2.ty),
|
||||||
|
loc: (*loc1, *loc2),
|
||||||
|
}, None))?;
|
||||||
|
new_fields.insert(*key, RecordField::new(val1.ty, val1.mutable || val2.mutable, val1.loc.or(val2.loc)));
|
||||||
} else {
|
} else {
|
||||||
fields2.insert(*key, (*ty, *is_mutable));
|
new_fields.insert(*key, *val1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Some(new_fields)
|
||||||
}
|
}
|
||||||
(Sequence(map1), Sequence(map2)) => {
|
};
|
||||||
let mut map2 = map2.borrow_mut();
|
let intersection = self.get_intersection(a, b).map_err(|_|
|
||||||
for (key, value) in map1.borrow().iter() {
|
TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None))?.unwrap();
|
||||||
if let Some(ty) = map2.get(key) {
|
let range = if let TypeEnum::TVar { range, .. } = &*self.get_ty(intersection) {
|
||||||
self.unify_impl(*ty, *value, false)?;
|
range.clone()
|
||||||
} else {
|
} else {
|
||||||
map2.insert(*key, *value);
|
unreachable!()
|
||||||
|
};
|
||||||
|
self.unification_table.unify(a, b);
|
||||||
|
self.unification_table.set_value(a, Rc::new(TypeEnum::TVar {
|
||||||
|
id: *id,
|
||||||
|
fields: new_fields,
|
||||||
|
range,
|
||||||
|
name: name1.or(*name2),
|
||||||
|
loc: loc1.or(*loc2)
|
||||||
|
}));
|
||||||
}
|
}
|
||||||
}
|
(TVar { fields: None, range, .. }, _) => {
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
return Err("Incompatible type variables".to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let range1 = range1.borrow();
|
|
||||||
// new range is the intersection of them
|
|
||||||
// empty range indicates no constraint
|
|
||||||
if !range1.is_empty() {
|
|
||||||
let old_range2 = range2.take();
|
|
||||||
let mut range2 = range2.borrow_mut();
|
|
||||||
if old_range2.is_empty() {
|
|
||||||
range2.extend_from_slice(&range1);
|
|
||||||
}
|
|
||||||
for v1 in old_range2.iter() {
|
|
||||||
for v2 in range1.iter() {
|
|
||||||
if let Ok(result) = self.get_intersection(*v1, *v2) {
|
|
||||||
range2.push(result.unwrap_or(*v2));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if range2.is_empty() {
|
|
||||||
return Err(
|
|
||||||
"cannot unify type variables with incompatible value range".to_string()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
self.set_a_to_b(a, b);
|
|
||||||
}
|
|
||||||
(TVar { meta: Generic, id, range, .. }, _) => {
|
|
||||||
// We check for the range of the type variable to see if unification is allowed.
|
// We check for the range of the type variable to see if unification is allowed.
|
||||||
// Note that although b may be compatible with a, we may have to constrain type
|
// Note that although b may be compatible with a, we may have to constrain type
|
||||||
// variables in b to make sure that instantiations of b would always be compatible
|
// variables in b to make sure that instantiations of b would always be compatible
|
||||||
@ -449,42 +520,50 @@ impl Unifier {
|
|||||||
// The return value x of check_var_compatibility would be a new type that is
|
// The return value x of check_var_compatibility would be a new type that is
|
||||||
// guaranteed to be compatible with a under all possible instantiations. So we
|
// guaranteed to be compatible with a under all possible instantiations. So we
|
||||||
// unify x with b to recursively apply the constrains, and then set a to x.
|
// unify x with b to recursively apply the constrains, and then set a to x.
|
||||||
let x = self.check_var_compatibility(*id, b, &range.borrow())?.unwrap_or(b);
|
let x = self.check_var_compatibility(b, range).map_err(|_|
|
||||||
|
TypeError::new(TypeErrorKind::IncompatibleRange(b, range.clone()), None))?.unwrap_or(b);
|
||||||
self.unify_impl(x, b, false)?;
|
self.unify_impl(x, b, false)?;
|
||||||
self.set_a_to_b(a, x);
|
self.set_a_to_b(a, x);
|
||||||
}
|
}
|
||||||
(TVar { meta: Sequence(map), id, range, .. }, TTuple { ty }) => {
|
(TVar { fields: Some(fields), range, .. }, TTuple { ty }) => {
|
||||||
let len = ty.len() as i32;
|
let len = ty.len() as i32;
|
||||||
for (k, v) in map.borrow().iter() {
|
for (k, v) in fields.iter() {
|
||||||
// handle negative index
|
match *k {
|
||||||
let ind = if *k < 0 { len + *k } else { *k };
|
RecordKey::Int(i) => {
|
||||||
|
if v.mutable {
|
||||||
|
return Err(TypeError::new(
|
||||||
|
TypeErrorKind::MutationError(*k, b), v.loc));
|
||||||
|
}
|
||||||
|
let ind = if i < 0 { len + i } else { i };
|
||||||
if ind >= len || ind < 0 {
|
if ind >= len || ind < 0 {
|
||||||
return Err(format!(
|
return Err(TypeError::new(
|
||||||
"Tuple index out of range. (Length: {}, Index: {})",
|
TypeErrorKind::TupleIndexOutOfBounds{ index: i, len}, v.loc));
|
||||||
len, k
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
self.unify_impl(*v, ty[ind as usize], false)?;
|
self.unify_impl(v.ty, ty[ind as usize], false).map_err(|e| e.at(v.loc))?;
|
||||||
}
|
}
|
||||||
let x = self.check_var_compatibility(*id, b, &range.borrow())?.unwrap_or(b);
|
RecordKey::Str(_) => return Err(TypeError::new(
|
||||||
|
TypeErrorKind::NoSuchField(*k, b), v.loc)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let x = self.check_var_compatibility(b, range)?.unwrap_or(b);
|
||||||
self.unify_impl(x, b, false)?;
|
self.unify_impl(x, b, false)?;
|
||||||
self.set_a_to_b(a, x);
|
self.set_a_to_b(a, x);
|
||||||
}
|
}
|
||||||
(TVar { meta: Sequence(map), id, range, .. }, TList { ty }) => {
|
(TVar { fields: Some(fields), range, .. }, TList { ty }) => {
|
||||||
for v in map.borrow().values() {
|
for (k, v) in fields.iter() {
|
||||||
self.unify_impl(*v, *ty, false)?;
|
match *k {
|
||||||
|
RecordKey::Int(_) => self.unify_impl(v.ty, *ty, false).map_err(|e| e.at(v.loc))?,
|
||||||
|
RecordKey::Str(_) => return Err(TypeError::new(
|
||||||
|
TypeErrorKind::NoSuchField(*k, b), v.loc)),
|
||||||
}
|
}
|
||||||
let x = self.check_var_compatibility(*id, b, &range.borrow())?.unwrap_or(b);
|
}
|
||||||
|
let x = self.check_var_compatibility(b, range)?.unwrap_or(b);
|
||||||
self.unify_impl(x, b, false)?;
|
self.unify_impl(x, b, false)?;
|
||||||
self.set_a_to_b(a, x);
|
self.set_a_to_b(a, x);
|
||||||
}
|
}
|
||||||
(TTuple { ty: ty1 }, TTuple { ty: ty2 }) => {
|
(TTuple { ty: ty1 }, TTuple { ty: ty2 }) => {
|
||||||
if ty1.len() != ty2.len() {
|
if ty1.len() != ty2.len() {
|
||||||
return Err(format!(
|
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
|
||||||
"Cannot unify tuples with length {} and {}",
|
|
||||||
ty1.len(),
|
|
||||||
ty2.len()
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
for (x, y) in ty1.iter().zip(ty2.iter()) {
|
for (x, y) in ty1.iter().zip(ty2.iter()) {
|
||||||
self.unify_impl(*x, *y, false)?;
|
self.unify_impl(*x, *y, false)?;
|
||||||
@ -495,47 +574,64 @@ impl Unifier {
|
|||||||
self.unify_impl(*ty1, *ty2, false)?;
|
self.unify_impl(*ty1, *ty2, false)?;
|
||||||
self.set_a_to_b(a, b);
|
self.set_a_to_b(a, b);
|
||||||
}
|
}
|
||||||
(TVar { meta: Record(map), id, range, .. }, TObj { fields, .. }) => {
|
(TVar { fields: Some(map), range, .. }, TObj { fields, .. }) => {
|
||||||
for (k, (ty, is_mutable)) in map.borrow().iter() {
|
for (k, field) in map.iter() {
|
||||||
let (ty2, is_mutable2) = fields
|
match *k {
|
||||||
.borrow()
|
RecordKey::Str(s) => {
|
||||||
.get(k)
|
let (ty, mutable) = fields
|
||||||
|
.get(&s)
|
||||||
.copied()
|
.copied()
|
||||||
.ok_or_else(|| format!("No such attribute {}", k))?;
|
.ok_or_else(|| TypeError::new(
|
||||||
|
TypeErrorKind::NoSuchField(*k, b), field.loc))?;
|
||||||
// typevar represents the usage of the variable
|
// typevar represents the usage of the variable
|
||||||
// it is OK to have immutable usage for mutable fields
|
// it is OK to have immutable usage for mutable fields
|
||||||
// but cannot have mutable usage for immutable fields
|
// but cannot have mutable usage for immutable fields
|
||||||
if *is_mutable && !is_mutable2 {
|
if field.mutable && !mutable{
|
||||||
return Err(format!("Field {} should be immutable", k));
|
return Err(TypeError::new(
|
||||||
|
TypeErrorKind::MutationError(*k, b), field.loc));
|
||||||
}
|
}
|
||||||
self.unify_impl(*ty, ty2, false)?;
|
self.unify_impl(field.ty, ty, false)
|
||||||
|
.map_err(|v| v.at(field.loc))?;
|
||||||
}
|
}
|
||||||
let x = self.check_var_compatibility(*id, b, &range.borrow())?.unwrap_or(b);
|
RecordKey::Int(_) => return Err(TypeError::new(
|
||||||
|
TypeErrorKind::NoSuchField(*k, b), field.loc))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let x = self.check_var_compatibility(b, range)?.unwrap_or(b);
|
||||||
self.unify_impl(x, b, false)?;
|
self.unify_impl(x, b, false)?;
|
||||||
self.set_a_to_b(a, x);
|
self.set_a_to_b(a, x);
|
||||||
}
|
}
|
||||||
-(TVar { meta: Record(map), id, range, .. }, TVirtual { ty }) => {
+(TVar { fields: Some(map), range, .. }, TVirtual { ty }) => {
 let ty = self.get_ty(*ty);
 if let TObj { fields, .. } = ty.as_ref() {
-for (k, (ty, is_mutable)) in map.borrow().iter() {
+for (k, field) in map.iter() {
-let (ty2, is_mutable2) = fields
+match *k {
-.borrow()
+RecordKey::Str(s) => {
-.get(k)
+let (ty, _) = fields
+.get(&s)
 .copied()
-.ok_or_else(|| format!("No such attribute {}", k))?;
+.ok_or_else(|| TypeError::new(
-if !matches!(self.get_ty(ty2).as_ref(), TFunc { .. }) {
+TypeErrorKind::NoSuchField(*k, b), field.loc))?;
-return Err(format!("Cannot access field {} for virtual type", k));
+if !matches!(self.get_ty(ty).as_ref(), TFunc { .. }) {
+return Err(TypeError::new(
+TypeErrorKind::NoSuchField(*k, b), field.loc))
 }
-if *is_mutable && !is_mutable2 {
+if field.mutable {
-return Err(format!("Field {} should be immutable", k));
+return Err(TypeError::new(
+TypeErrorKind::MutationError(*k, b), field.loc));
+}
+self.unify_impl(field.ty, ty, false)
+.map_err(|v| v.at(field.loc))?;
+}
+RecordKey::Int(_) => return Err(TypeError::new(
+TypeErrorKind::NoSuchField(*k, b), field.loc))
 }
-self.unify_impl(*ty, ty2, false)?;
 }
 } else {
 // require annotation...
-return Err("Requires type annotation for virtual".to_string());
+return Err(TypeError::new(TypeErrorKind::RequiresTypeAnn, None))
 }
-let x = self.check_var_compatibility(*id, b, &range.borrow())?.unwrap_or(b);
+let x = self.check_var_compatibility(b, range)?.unwrap_or(b);
 self.unify_impl(x, b, false)?;
 self.set_a_to_b(a, x);
 }
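The two arms above replace string-formatted failures with a structured error that pairs a TypeErrorKind with an optional source location, and `.map_err(|v| v.at(field.loc))` attaches the record field's location to errors coming out of the nested unification. The real TypeError and TypeErrorKind definitions live elsewhere in this commit; the standalone sketch below only illustrates the pattern, and every name in it that is not visible in the diff (Location, lookup_field, the assumption that `at` only fills in a missing location) is invented for the example.

    // Hypothetical stand-ins; only the error-plumbing pattern matters here.
    #[derive(Clone, Copy, Debug)]
    struct Location { row: u32, col: u32 }

    type Type = u32;

    #[derive(Debug)]
    enum TypeErrorKind {
        NoSuchField(&'static str, Type),
        MutationError(&'static str, Type),
        IncompatibleTypes(Type, Type),
    }

    #[derive(Debug)]
    struct TypeError {
        kind: TypeErrorKind,
        loc: Option<Location>,
    }

    impl TypeError {
        fn new(kind: TypeErrorKind, loc: Option<Location>) -> TypeError {
            TypeError { kind, loc }
        }
        // Attach a location only if the error does not carry one yet,
        // mirroring how the `.map_err(|v| v.at(field.loc))` calls are used above.
        fn at(mut self, loc: Option<Location>) -> TypeError {
            if self.loc.is_none() {
                self.loc = loc;
            }
            self
        }
    }

    fn lookup_field(fields: &[(&'static str, Type)], name: &'static str, obj: Type) -> Result<Type, TypeError> {
        fields
            .iter()
            .find(|(k, _)| *k == name)
            .map(|(_, ty)| *ty)
            .ok_or_else(|| TypeError::new(TypeErrorKind::NoSuchField(name, obj), None))
    }

    fn main() {
        let err = lookup_field(&[("a", 0)], "b", 7)
            .map_err(|e| e.at(Some(Location { row: 3, col: 14 })))
            .unwrap_err();
        println!("{:?}", err);
    }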
@@ -546,7 +642,7 @@ impl Unifier {
 if id1 != id2 {
 self.incompatible_types(a, b)?;
 }
-for (x, y) in zip(params1.borrow().values(), params2.borrow().values()) {
+for (x, y) in zip(params1.values(), params2.values()) {
 self.unify_impl(*x, *y, false)?;
 }
 self.set_a_to_b(a, b);
@@ -558,11 +654,12 @@ impl Unifier {
 (TCall(calls1), TCall(calls2)) => {
 // we do not unify individual calls, instead we defer until the unification wtih a
 // function definition.
-calls2.borrow_mut().extend_from_slice(&calls1.borrow());
+let calls = calls1.iter().chain(calls2.iter()).cloned().collect();
+self.set_a_to_b(a, b);
+self.unification_table.set_value(b, Rc::new(TCall(calls)));
 }
 (TCall(calls), TFunc(signature)) => {
 let required: Vec<StrRef> = signature
-.borrow()
 .args
 .iter()
 .filter(|v| v.default_value.is_none())
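In the TCall arm above, pending calls are no longer pushed into a shared RefCell list; the two lists are merged into a fresh vector that is installed on the representative key, and the calls are only checked once the variable later unifies with a concrete TFunc. A rough self-contained sketch of that defer-then-check idea, with deliberately simplified stand-in types (Call, Signature, and the string-based matching are assumptions, not the crate's representation):

    // Simplified model: a pending call records the argument types it was made with.
    #[derive(Clone, Debug)]
    struct Call { args: Vec<&'static str> }

    #[derive(Clone, Debug)]
    struct Signature { params: Vec<&'static str> }

    // Merging two call placeholders just concatenates their pending calls.
    fn merge_calls(calls1: &[Call], calls2: &[Call]) -> Vec<Call> {
        calls1.iter().chain(calls2.iter()).cloned().collect()
    }

    // Checking is deferred until a concrete signature is known.
    fn check_calls(calls: &[Call], sig: &Signature) -> Result<(), String> {
        for c in calls {
            if c.args != sig.params {
                return Err(format!("call {:?} does not match signature {:?}", c, sig));
            }
        }
        Ok(())
    }

    fn main() {
        let pending = merge_calls(
            &[Call { args: vec!["int32"] }],
            &[Call { args: vec!["int32"] }],
        );
        let sig = Signature { params: vec!["int32"] };
        assert!(check_calls(&pending, &sig).is_ok());
    }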
@@ -570,33 +667,32 @@ impl Unifier {
 .rev()
 .collect();
 // we unify every calls to the function signature.
-let signature = signature.borrow();
+for c in calls.iter() {
-for c in calls.borrow().iter() {
 let call = self.calls[c.0].clone();
-self.unify_call(&call, b, &signature, &required)?;
+self.unify_call(&call, b, signature, &required)?;
 }
 self.set_a_to_b(a, b);
 }
 (TFunc(sign1), TFunc(sign2)) => {
-let (sign1, sign2) = (&*sign1.borrow(), &*sign2.borrow());
 if !sign1.vars.is_empty() || !sign2.vars.is_empty() {
-return Err("Polymorphic function pointer is prohibited.".to_string());
+return Err(TypeError::new(TypeErrorKind::PolymorphicFunctionPointer, None));
 }
 if sign1.args.len() != sign2.args.len() {
-return Err("Functions differ in number of parameters.".to_string());
+return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
 }
 for (x, y) in sign1.args.iter().zip(sign2.args.iter()) {
-if x.name != y.name {
+if x.name != y.name || x.default_value != y.default_value {
-return Err("Functions differ in parameter names.".to_string());
+return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
-}
-if x.default_value != y.default_value {
-return Err("Functions differ in optional parameters value".to_string());
 }
 self.unify_impl(x.ty, y.ty, false)?;
 }
 self.unify_impl(sign1.ret, sign2.ret, false)?;
 self.set_a_to_b(a, b);
 }
+(TVar { fields: Some(fields), .. }, _) => {
+let (k, v) = fields.iter().next().unwrap();
+return Err(TypeError::new(TypeErrorKind::NoSuchField(*k, b), v.loc));
+}
 _ => {
 if swapped {
 return self.incompatible_types(a, b);
@@ -608,9 +704,13 @@ impl Unifier {
 Ok(())
 }

-pub fn default_stringify(&mut self, ty: Type) -> String {
+pub fn stringify(&self, ty: Type) -> String {
+self.stringify_with_notes(ty, &mut None)
+}
+
+pub fn stringify_with_notes(&self, ty: Type, notes: &mut Option<HashMap<u32, String>>) -> String {
 let top_level = self.top_level.clone();
-self.stringify(
+self.internal_stringify(
 ty,
 &mut |id| {
 top_level.as_ref().map_or_else(
@@ -627,54 +727,50 @@ impl Unifier {
 )
 },
 &mut |id| format!("var{}", id),
+notes
 )
 }

 /// Get string representation of the type
-pub fn stringify<F, G>(&mut self, ty: Type, obj_to_name: &mut F, var_to_name: &mut G) -> String
+pub fn internal_stringify<F, G>(&self, ty: Type, obj_to_name: &mut F, var_to_name: &mut G, notes: &mut Option<HashMap<u32, String>>) -> String
 where
 F: FnMut(usize) -> String,
 G: FnMut(u32) -> String,
 {
-use TypeVarMeta::*;
+let ty = self.unification_table.probe_value_immutable(ty).clone();
-let ty = self.unification_table.probe_value(ty).clone();
 match ty.as_ref() {
-TypeEnum::TRigidVar { id } => var_to_name(*id),
+TypeEnum::TRigidVar { id, name, .. } => name.map(|v| v.to_string()).unwrap_or_else(|| var_to_name(*id)),
-TypeEnum::TVar { id, meta: Generic, .. } => var_to_name(*id),
+TypeEnum::TVar { id, name, fields, range, .. } => {
-TypeEnum::TVar { meta: Sequence(map), .. } => {
+let n = if let Some(fields) = fields {
-let fields = map
+let mut fields = fields.iter().map(|(k, f)| format!("{}={}", k, self.internal_stringify(f.ty, obj_to_name, var_to_name, notes)));
-.borrow()
+let fields = fields.join(", ");
-.iter()
+format!("{}[{}]", name.map(|v| v.to_string()).unwrap_or_else(|| var_to_name(*id)), fields)
-.map(|(k, v)| format!("{}={}", k, self.stringify(*v, obj_to_name, var_to_name)))
+} else {
-.join(", ");
+name.map(|v| v.to_string()).unwrap_or_else(|| var_to_name(*id))
-format!("seq[{}]", fields)
+};
-}
+if !range.is_empty() && notes.is_some() && !notes.as_ref().unwrap().contains_key(id) {
-TypeEnum::TVar { meta: Record(fields), .. } => {
+// just in case if there is any cyclic dependency
-let fields = fields
+notes.as_mut().unwrap().insert(*id, "".into());
-.borrow()
+let body = format!("{} ∈ {{{}}}", n, range.iter().map(|v| self.internal_stringify(*v, obj_to_name, var_to_name, notes)).collect::<Vec<_>>().join(", "));
-.iter()
+notes.as_mut().unwrap().insert(*id, body);
-.map(|(k, (v, _))| {
+};
-format!("{}={}", k, self.stringify(*v, obj_to_name, var_to_name))
+n
-})
-.join(", ");
-format!("record[{}]", fields)
 }
 TypeEnum::TTuple { ty } => {
-let mut fields = ty.iter().map(|v| self.stringify(*v, obj_to_name, var_to_name));
+let mut fields = ty.iter().map(|v| self.internal_stringify(*v, obj_to_name, var_to_name, notes));
 format!("tuple[{}]", fields.join(", "))
 }
 TypeEnum::TList { ty } => {
-format!("list[{}]", self.stringify(*ty, obj_to_name, var_to_name))
+format!("list[{}]", self.internal_stringify(*ty, obj_to_name, var_to_name, notes))
 }
 TypeEnum::TVirtual { ty } => {
-format!("virtual[{}]", self.stringify(*ty, obj_to_name, var_to_name))
+format!("virtual[{}]", self.internal_stringify(*ty, obj_to_name, var_to_name, notes))
 }
 TypeEnum::TObj { obj_id, params, .. } => {
 let name = obj_to_name(obj_id.0);
-let params = params.borrow();
 if !params.is_empty() {
-let params = params.iter().map(|(id, v)| {
+let params = params.iter().map(|(_, v)| {
-format!("{}->{}", *id, self.stringify(*v, obj_to_name, var_to_name))
+self.internal_stringify(*v, obj_to_name, var_to_name, notes)
 });
 // sort to preserve order
 let mut params = params.sorted();
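The printer above now splits into a thin `stringify` wrapper and `stringify_with_notes`, which threads an optional HashMap of side notes: when a constrained variable is printed, its allowed range is recorded once under the variable's id (with an empty placeholder inserted first to break cycles), so a later error message can append something like the "var5 ∈ {0, 2}" note expected by the tests further down. A self-contained sketch of that note-collecting scheme over a toy type representation (the Ty enum and helper are inventions for illustration):

    use std::collections::HashMap;

    // Toy type representation: a named object or a constrained variable.
    enum Ty {
        Obj(&'static str),
        Var { id: u32, range: Vec<Ty> },
    }

    fn stringify(ty: &Ty, notes: &mut Option<HashMap<u32, String>>) -> String {
        match ty {
            Ty::Obj(name) => (*name).to_string(),
            Ty::Var { id, range } => {
                let name = format!("var{}", id);
                if !range.is_empty()
                    && notes.is_some()
                    && !notes.as_ref().unwrap().contains_key(id)
                {
                    // Insert a placeholder first so a cyclic constraint cannot recurse forever.
                    notes.as_mut().unwrap().insert(*id, String::new());
                    let mut parts = Vec::new();
                    for v in range {
                        parts.push(stringify(v, notes));
                    }
                    let body = format!("{} ∈ {{{}}}", name, parts.join(", "));
                    notes.as_mut().unwrap().insert(*id, body);
                }
                name
            }
        }
    }

    fn main() {
        let ty = Ty::Var { id: 5, range: vec![Ty::Obj("int32"), Ty::Obj("bool")] };
        let mut notes = Some(HashMap::new());
        println!("{}", stringify(&ty, &mut notes));  // var5
        println!("{:?}", notes.unwrap().get(&5));    // Some("var5 ∈ {int32, bool}")
    }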
@@ -686,14 +782,17 @@ impl Unifier {
 TypeEnum::TCall { .. } => "call".to_owned(),
 TypeEnum::TFunc(signature) => {
 let params = signature
-.borrow()
 .args
 .iter()
 .map(|arg| {
-format!("{}={}", arg.name, self.stringify(arg.ty, obj_to_name, var_to_name))
+if let Some(dv) = &arg.default_value {
+format!("{}:{}={}", arg.name, self.internal_stringify(arg.ty, obj_to_name, var_to_name, notes), dv)
+} else {
+format!("{}:{}", arg.name, self.internal_stringify(arg.ty, obj_to_name, var_to_name, notes))
+}
 })
 .join(", ");
-let ret = self.stringify(signature.borrow().ret, obj_to_name, var_to_name);
+let ret = self.internal_stringify(signature.ret, obj_to_name, var_to_name, notes);
 format!("fn[[{}], {}]", params, ret)
 }
 }
@@ -707,12 +806,8 @@ impl Unifier {
 table.set_value(a, ty_b)
 }

-fn incompatible_types(&mut self, a: Type, b: Type) -> Result<(), String> {
+fn incompatible_types(&mut self, a: Type, b: Type) -> Result<(), TypeError> {
-Err(format!(
+Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None))
-"Cannot unify {} with {}",
-self.default_stringify(a),
-self.default_stringify(b)
-))
 }

 /// Instantiate a function if it hasn't been instantiated.
@@ -722,7 +817,7 @@ impl Unifier {
 let mut instantiated = true;
 let mut vars = Vec::new();
 for (k, v) in fun.vars.iter() {
-if let TypeEnum::TVar { id, range, .. } =
+if let TypeEnum::TVar { id, name, loc, range, .. } =
 self.unification_table.probe_value(*v).as_ref()
 {
 // for class methods that contain type vars not in class declaration,
@@ -730,7 +825,7 @@ impl Unifier {
 // and need to do substitution on those type vars
 if k == id {
 instantiated = false;
-vars.push((*k, range.clone()));
+vars.push((*k, range.clone(), *name, *loc));
 }
 }
 }
@@ -739,7 +834,7 @@ impl Unifier {
 } else {
 let mapping = vars
 .into_iter()
-.map(|(k, range)| (k, self.get_fresh_var_with_range(range.borrow().as_ref()).0))
+.map(|(k, range, name, loc)| (k, self.get_fresh_var_with_range(range.as_ref(), name, loc).0))
 .collect();
 self.subst(ty, &mapping).unwrap_or(ty)
 }
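During instantiation the signature's type variables are now collected together with their name and definition location, each one is replaced by a fresh variable minted through `get_fresh_var_with_range(range, name, loc)`, and `subst` rewrites the type with that mapping. A minimal standalone sketch of the "collect vars, mint fresh ones, substitute" step, using a toy substitution keyed by variable ids (all types and ids here are invented):

    use std::collections::HashMap;

    // Toy types: a type is either a concrete name or a variable identified by id.
    #[derive(Clone, Debug, PartialEq)]
    enum Ty {
        Concrete(&'static str),
        Var(u32),
        List(Box<Ty>),
    }

    // Replace every variable that appears in `mapping` by its fresh counterpart.
    fn subst(ty: &Ty, mapping: &HashMap<u32, Ty>) -> Ty {
        match ty {
            Ty::Var(id) => mapping.get(id).cloned().unwrap_or_else(|| ty.clone()),
            Ty::List(inner) => Ty::List(Box::new(subst(inner, mapping))),
            Ty::Concrete(_) => ty.clone(),
        }
    }

    fn main() {
        // Pretend a signature `fn(list[V]) -> V` uses the type variable V with id 7.
        let param = Ty::List(Box::new(Ty::Var(7)));
        let ret = Ty::Var(7);

        // Mint a "fresh" variable for this instantiation; the real unifier also
        // carries the variable's range, name and location over to the fresh one.
        let mut mapping = HashMap::new();
        mapping.insert(7, Ty::Var(100));

        assert_eq!(subst(&param, &mapping), Ty::List(Box::new(Ty::Var(100))));
        assert_eq!(subst(&ret, &mapping), Ty::Var(100));
    }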
@@ -762,7 +857,7 @@ impl Unifier {
 let cached = cache.get_mut(&a);
 if let Some(cached) = cached {
 if cached.is_none() {
-*cached = Some(self.get_fresh_var().0);
+*cached = Some(self.get_fresh_var(None, None).0);
 }
 return *cached;
 }
@@ -799,7 +894,6 @@ impl Unifier {
 // If the mapping does not contain any type variables in the
 // parameter list, we don't need to substitute the fields.
 // This is also used to prevent infinite substitution...
-let params = params.borrow();
 let need_subst = params.values().any(|v| {
 let ty = self.unification_table.probe_value(*v);
 if let TypeEnum::TVar { id, .. } = ty.as_ref() {
@@ -812,15 +906,11 @@ impl Unifier {
 cache.insert(a, None);
 let obj_id = *obj_id;
 let params =
-self.subst_map(&params, mapping, cache).unwrap_or_else(|| params.clone());
+self.subst_map(params, mapping, cache).unwrap_or_else(|| params.clone());
 let fields = self
-.subst_map2(&fields.borrow(), mapping, cache)
+.subst_map2(fields, mapping, cache)
-.unwrap_or_else(|| fields.borrow().clone());
+.unwrap_or_else(|| fields.clone());
-let new_ty = self.add_ty(TypeEnum::TObj {
+let new_ty = self.add_ty(TypeEnum::TObj { obj_id, params, fields });
-obj_id,
-params: params.into(),
-fields: fields.into(),
-});
 if let Some(var) = cache.get(&a).unwrap() {
 self.unify_impl(new_ty, *var, false).unwrap();
 }
@@ -829,8 +919,7 @@ impl Unifier {
 None
 }
 }
-TypeEnum::TFunc(sig) => {
+TypeEnum::TFunc(FunSignature { args, ret, vars: params }) => {
-let FunSignature { args, ret, vars: params } = &*sig.borrow();
 let new_params = self.subst_map(params, mapping, cache);
 let new_ret = self.subst_impl(*ret, mapping, cache);
 let mut new_args = Cow::from(args);
@@ -845,11 +934,7 @@ impl Unifier {
 let params = new_params.unwrap_or_else(|| params.clone());
 let ret = new_ret.unwrap_or_else(|| *ret);
 let args = new_args.into_owned();
-Some(
+Some(self.add_ty(TypeEnum::TFunc(FunSignature { args, ret, vars: params })))
-self.add_ty(TypeEnum::TFunc(
-FunSignature { args, ret, vars: params }.into(),
-)),
-)
 } else {
 None
 }
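A recurring change in these hunks is that TypeEnum's maps and signatures are apparently no longer wrapped in RefCell (the `.borrow()` and `.into()` calls disappear): instead of mutating a stored type in place, the code builds a new value and installs it on the unification key, as in `self.unification_table.set_value(b, Rc::new(TCall(calls)))` earlier in this diff. A rough sketch of that rebuild-and-swap style, with a hypothetical table keyed by plain indices:

    use std::rc::Rc;

    // Hypothetical immutable type value shared behind Rc.
    #[derive(Debug)]
    enum TypeEnum {
        Tuple(Vec<u32>),
    }

    struct Table {
        values: Vec<Rc<TypeEnum>>,
    }

    impl Table {
        // Instead of mutating the stored value through a RefCell, build a new
        // TypeEnum and swap the Rc held for that slot.
        fn set_value(&mut self, key: usize, value: Rc<TypeEnum>) {
            self.values[key] = value;
        }
    }

    fn main() {
        let mut table = Table { values: vec![Rc::new(TypeEnum::Tuple(vec![0]))] };
        let merged = match table.values[0].as_ref() {
            TypeEnum::Tuple(tys) => {
                let mut tys = tys.clone();
                tys.push(1);
                TypeEnum::Tuple(tys)
            }
        };
        table.set_value(0, Rc::new(merged));
        println!("{:?}", table.values[0]); // Tuple([0, 1])
    }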
@@ -907,40 +992,28 @@ impl Unifier {
 let x = self.get_ty(a);
 let y = self.get_ty(b);
 match (x.as_ref(), y.as_ref()) {
-(TVar { range: range1, .. }, TVar { meta, range: range2, .. }) => {
+(TVar { range: range1, name, loc, .. }, TVar { fields, range: range2, name: name2, loc: loc2, .. }) => {
-// we should restrict range2
-let range1 = range1.borrow();
 // new range is the intersection of them
 // empty range indicates no constraint
-if !range1.is_empty() {
+if range1.is_empty() {
-let range2 = range2.borrow();
+Ok(Some(b))
-let mut range = Vec::new();
+} else if range2.is_empty() {
-if range2.is_empty() {
+Ok(Some(a))
-range.extend_from_slice(&range1);
+} else {
-}
+let range = range2.iter().cartesian_product(range1.iter())
-for v1 in range2.iter() {
+.filter_map(|(v1, v2)| self.get_intersection(*v1, *v2).map(|v| v.unwrap_or(*v1)).ok()).collect_vec();
-for v2 in range1.iter() {
-let result = self.get_intersection(*v1, *v2);
-if let Ok(result) = result {
-range.push(result.unwrap_or(*v2));
-}
-}
-}
 if range.is_empty() {
 Err(())
 } else {
 let id = self.var_id + 1;
 self.var_id += 1;
-let ty = TVar { id, meta: meta.clone(), range: range.into() };
+let ty = TVar { id, fields: fields.clone(), range, name: name2.or(*name), loc: loc2.or(*loc) };
 Ok(Some(self.unification_table.new_key(ty.into())))
 }
-} else {
-Ok(Some(b))
 }
 }
 (_, TVar { range, .. }) => {
 // range should be restricted to the left hand side
-let range = range.borrow();
 if range.is_empty() {
 Ok(Some(a))
 } else {
@@ -953,24 +1026,13 @@ impl Unifier {
 Err(())
 }
 }
-(TVar { id, range, .. }, _) => {
+(TVar { range, .. }, _) => {
-self.check_var_compatibility(*id, b, &range.borrow()).or(Err(()))
+self.check_var_compatibility(b, range).or(Err(()))
 }
-(TTuple { ty: ty1 }, TTuple { ty: ty2 }) => {
+(TTuple { ty: ty1 }, TTuple { ty: ty2 }) if ty1.len() == ty2.len() => {
-if ty1.len() != ty2.len() {
+let ty: Vec<_> = zip(ty1.iter(), ty2.iter()).map(|(a, b)| self.get_intersection(*a, *b)).try_collect()?;
-return Err(());
+if ty.iter().any(Option::is_some) {
-}
+Ok(Some(self.add_ty(TTuple { ty: zip(ty.into_iter(), ty1.iter()).map(|(a, b)| a.unwrap_or(*b)).collect()})))
-let mut need_new = false;
-let mut ty = ty1.clone();
-for (a, b) in zip(ty1.iter(), ty2.iter()) {
-let result = self.get_intersection(*a, *b)?;
-ty.push(result.unwrap_or(*a));
-if result.is_some() {
-need_new = true;
-}
-}
-if need_new {
-Ok(Some(self.add_ty(TTuple { ty })))
 } else {
 Ok(None)
 }
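The variable-vs-variable case above now computes the merged constraint range as the pairwise intersection of the two old ranges (via itertools' cartesian_product), keeping only the pairs whose intersection succeeds; an empty result means the variables share no admissible type. The same computation written with plain std iterators, over a toy representation where a "type" is just a tag and two tags intersect only when equal:

    // Toy stand-in: two tags "intersect" only when they are equal.
    fn get_intersection(a: &'static str, b: &'static str) -> Result<Option<&'static str>, ()> {
        if a == b { Ok(Some(a)) } else { Err(()) }
    }

    // New range = all pairwise intersections of the two old ranges that succeed;
    // an empty result means the two variables have no type in common.
    fn intersect_ranges(range1: &[&'static str], range2: &[&'static str]) -> Result<Vec<&'static str>, ()> {
        let range: Vec<_> = range2
            .iter()
            .flat_map(|v1| range1.iter().map(move |v2| (*v1, *v2)))
            .filter_map(|(v1, v2)| get_intersection(v1, v2).map(|v| v.unwrap_or(v1)).ok())
            .collect();
        if range.is_empty() { Err(()) } else { Ok(range) }
    }

    fn main() {
        // v ∈ {int32, float}, w ∈ {bool, float}  =>  the merged variable is {float}.
        assert_eq!(intersect_ranges(&["int32", "float"], &["bool", "float"]), Ok(vec!["float"]));
        // Disjoint ranges cannot be unified at all.
        assert!(intersect_ranges(&["int32"], &["bool"]).is_err());
    }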
@@ -981,12 +1043,8 @@ impl Unifier {
 (TVirtual { ty: ty1 }, TVirtual { ty: ty2 }) => {
 Ok(self.get_intersection(*ty1, *ty2)?.map(|ty| self.add_ty(TVirtual { ty })))
 }
-(TObj { obj_id: id1, .. }, TObj { obj_id: id2, .. }) => {
+(TObj { obj_id: id1, .. }, TObj { obj_id: id2, .. }) if id1 == id2 => {
-if id1 == id2 {
 Ok(None)
-} else {
-Err(())
-}
 }
 // don't deal with function shape for now
 _ => Err(()),
@@ -995,10 +1053,9 @@ impl Unifier {

 fn check_var_compatibility(
 &mut self,
-id: u32,
 b: Type,
 range: &[Type],
-) -> Result<Option<Type>, String> {
+) -> Result<Option<Type>, TypeError> {
 if range.is_empty() {
 return Ok(None);
 }
@@ -1008,10 +1065,6 @@ impl Unifier {
 return Ok(result);
 }
 }
-return Err(format!(
+Err(TypeError::new(TypeErrorKind::IncompatibleRange(b, range.to_vec()), None))
-"Cannot unify variable {} with {} due to incompatible value range",
-id,
-self.default_stringify(b)
-));
 }
 }
@@ -1,4 +1,5 @@
 use super::*;
+use super::super::magic_methods::with_fields;
 use indoc::indoc;
 use itertools::Itertools;
 use std::collections::HashMap;
@@ -7,7 +8,6 @@ use test_case::test_case;
 impl Unifier {
 /// Check whether two types are equal.
 fn eq(&mut self, a: Type, b: Type) -> bool {
-use TypeVarMeta::*;
 if a == b {
 return true;
 }
|
|||||||
|
|
||||||
match (&*ty_a, &*ty_b) {
|
match (&*ty_a, &*ty_b) {
|
||||||
(
|
(
|
||||||
TypeEnum::TVar { meta: Generic, id: id1, .. },
|
TypeEnum::TVar { fields: None, id: id1, .. },
|
||||||
TypeEnum::TVar { meta: Generic, id: id2, .. },
|
TypeEnum::TVar { fields: None, id: id2, .. },
|
||||||
) => id1 == id2,
|
) => id1 == id2,
|
||||||
(
|
(
|
||||||
TypeEnum::TVar { meta: Sequence(map1), .. },
|
TypeEnum::TVar { fields: Some(map1), .. },
|
||||||
TypeEnum::TVar { meta: Sequence(map2), .. },
|
TypeEnum::TVar { fields: Some(map2), .. },
|
||||||
) => self.map_eq(&map1.borrow(), &map2.borrow()),
|
) => self.map_eq2(map1, map2),
|
||||||
(TypeEnum::TTuple { ty: ty1 }, TypeEnum::TTuple { ty: ty2 }) => {
|
(TypeEnum::TTuple { ty: ty1 }, TypeEnum::TTuple { ty: ty2 }) => {
|
||||||
ty1.len() == ty2.len()
|
ty1.len() == ty2.len()
|
||||||
&& ty1.iter().zip(ty2.iter()).all(|(t1, t2)| self.eq(*t1, *t2))
|
&& ty1.iter().zip(ty2.iter()).all(|(t1, t2)| self.eq(*t1, *t2))
|
||||||
@ -36,14 +36,10 @@ impl Unifier {
|
|||||||
| (TypeEnum::TVirtual { ty: ty1 }, TypeEnum::TVirtual { ty: ty2 }) => {
|
| (TypeEnum::TVirtual { ty: ty1 }, TypeEnum::TVirtual { ty: ty2 }) => {
|
||||||
self.eq(*ty1, *ty2)
|
self.eq(*ty1, *ty2)
|
||||||
}
|
}
|
||||||
(
|
|
||||||
TypeEnum::TVar { meta: Record(fields1), .. },
|
|
||||||
TypeEnum::TVar { meta: Record(fields2), .. },
|
|
||||||
) => self.map_eq2(&fields1.borrow(), &fields2.borrow()),
|
|
||||||
(
|
(
|
||||||
TypeEnum::TObj { obj_id: id1, params: params1, .. },
|
TypeEnum::TObj { obj_id: id1, params: params1, .. },
|
||||||
TypeEnum::TObj { obj_id: id2, params: params2, .. },
|
TypeEnum::TObj { obj_id: id2, params: params2, .. },
|
||||||
) => id1 == id2 && self.map_eq(¶ms1.borrow(), ¶ms2.borrow()),
|
) => id1 == id2 && self.map_eq(params1, params2),
|
||||||
// TCall and TFunc are not yet implemented
|
// TCall and TFunc are not yet implemented
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
@ -64,19 +60,15 @@ impl Unifier {
|
|||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|
||||||
fn map_eq2<K>(
|
fn map_eq2<K>(&mut self, map1: &Mapping<K, RecordField>, map2: &Mapping<K, RecordField>) -> bool
|
||||||
&mut self,
|
|
||||||
map1: &Mapping<K, (Type, bool)>,
|
|
||||||
map2: &Mapping<K, (Type, bool)>,
|
|
||||||
) -> bool
|
|
||||||
where
|
where
|
||||||
K: std::hash::Hash + std::cmp::Eq + std::clone::Clone,
|
K: std::hash::Hash + std::cmp::Eq + std::clone::Clone,
|
||||||
{
|
{
|
||||||
if map1.len() != map2.len() {
|
if map1.len() != map2.len() {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
for (k, (ty1, m1)) in map1.iter() {
|
for (k, v) in map1.iter() {
|
||||||
if !map2.get(k).map(|(ty2, m2)| m1 == m2 && self.eq(*ty1, *ty2)).unwrap_or(false) {
|
if !map2.get(k).map(|v1| self.eq(v.ty, v1.ty)).unwrap_or(false) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -98,27 +90,27 @@ impl TestEnvironment {
|
|||||||
"int".into(),
|
"int".into(),
|
||||||
unifier.add_ty(TypeEnum::TObj {
|
unifier.add_ty(TypeEnum::TObj {
|
||||||
obj_id: DefinitionId(0),
|
obj_id: DefinitionId(0),
|
||||||
fields: HashMap::new().into(),
|
fields: HashMap::new(),
|
||||||
params: HashMap::new().into(),
|
params: HashMap::new(),
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
type_mapping.insert(
|
type_mapping.insert(
|
||||||
"float".into(),
|
"float".into(),
|
||||||
unifier.add_ty(TypeEnum::TObj {
|
unifier.add_ty(TypeEnum::TObj {
|
||||||
obj_id: DefinitionId(1),
|
obj_id: DefinitionId(1),
|
||||||
fields: HashMap::new().into(),
|
fields: HashMap::new(),
|
||||||
params: HashMap::new().into(),
|
params: HashMap::new(),
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
type_mapping.insert(
|
type_mapping.insert(
|
||||||
"bool".into(),
|
"bool".into(),
|
||||||
unifier.add_ty(TypeEnum::TObj {
|
unifier.add_ty(TypeEnum::TObj {
|
||||||
obj_id: DefinitionId(2),
|
obj_id: DefinitionId(2),
|
||||||
fields: HashMap::new().into(),
|
fields: HashMap::new(),
|
||||||
params: HashMap::new().into(),
|
params: HashMap::new(),
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
let (v0, id) = unifier.get_fresh_var();
|
let (v0, id) = unifier.get_dummy_var();
|
||||||
type_mapping.insert(
|
type_mapping.insert(
|
||||||
"Foo".into(),
|
"Foo".into(),
|
||||||
unifier.add_ty(TypeEnum::TObj {
|
unifier.add_ty(TypeEnum::TObj {
|
||||||
@ -126,9 +118,8 @@ impl TestEnvironment {
|
|||||||
fields: [("a".into(), (v0, true))]
|
fields: [("a".into(), (v0, true))]
|
||||||
.iter()
|
.iter()
|
||||||
.cloned()
|
.cloned()
|
||||||
.collect::<HashMap<_, _>>()
|
.collect::<HashMap<_, _>>(),
|
||||||
.into(),
|
params: [(id, v0)].iter().cloned().collect::<HashMap<_, _>>(),
|
||||||
params: [(id, v0)].iter().cloned().collect::<HashMap<_, _>>().into(),
|
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
|
|
||||||
@ -174,7 +165,7 @@ impl TestEnvironment {
|
|||||||
let eq = s.find('=').unwrap();
|
let eq = s.find('=').unwrap();
|
||||||
let key = s[1..eq].into();
|
let key = s[1..eq].into();
|
||||||
let result = self.internal_parse(&s[eq + 1..], mapping);
|
let result = self.internal_parse(&s[eq + 1..], mapping);
|
||||||
fields.insert(key, (result.0, true));
|
fields.insert(key, RecordField::new(result.0, true, None));
|
||||||
s = result.1;
|
s = result.1;
|
||||||
}
|
}
|
||||||
(self.unifier.add_record(fields), &s[1..])
|
(self.unifier.add_record(fields), &s[1..])
|
||||||
@ -187,7 +178,6 @@ impl TestEnvironment {
|
|||||||
let mut ty = *self.type_mapping.get(x).unwrap();
|
let mut ty = *self.type_mapping.get(x).unwrap();
|
||||||
let te = self.unifier.get_ty(ty);
|
let te = self.unifier.get_ty(ty);
|
||||||
if let TypeEnum::TObj { params, .. } = &*te.as_ref() {
|
if let TypeEnum::TObj { params, .. } = &*te.as_ref() {
|
||||||
let params = params.borrow();
|
|
||||||
if !params.is_empty() {
|
if !params.is_empty() {
|
||||||
assert!(&s[0..1] == "[");
|
assert!(&s[0..1] == "[");
|
||||||
let mut p = Vec::new();
|
let mut p = Vec::new();
|
||||||
@ -209,6 +199,10 @@ impl TestEnvironment {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||

+fn unify(&mut self, typ1: Type, typ2: Type) -> Result<(), String> {
+self.unifier.unify(typ1, typ2).map_err(|e| e.to_display(&self.unifier).to_string())
+}
 }
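The new `unify` test helper turns a TypeError into a comparable string with `e.to_display(&self.unifier).to_string()`: the error cannot format itself without the unifier, so it is wrapped in a small adapter that borrows the unifier and implements Display. A standalone sketch of that adapter pattern (all names here are invented, only the wiring mirrors the call above):

    use std::fmt;

    // Invented minimal context and error; only the wiring matters.
    struct Unifier {
        names: Vec<&'static str>,
    }

    struct TypeError {
        a: usize,
        b: usize,
    }

    // Borrowing adapter: ties the error to the context it needs for printing.
    struct DisplayTypeError<'a> {
        err: &'a TypeError,
        unifier: &'a Unifier,
    }

    impl TypeError {
        fn to_display<'a>(&'a self, unifier: &'a Unifier) -> DisplayTypeError<'a> {
            DisplayTypeError { err: self, unifier }
        }
    }

    impl fmt::Display for DisplayTypeError<'_> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(
                f,
                "Incompatible types: {} and {}",
                self.unifier.names[self.err.a], self.unifier.names[self.err.b]
            )
        }
    }

    fn main() {
        let unifier = Unifier { names: vec!["list[int32]", "tuple[int32]"] };
        let err = TypeError { a: 0, b: 1 };
        assert_eq!(err.to_display(&unifier).to_string(),
                   "Incompatible types: list[int32] and tuple[int32]");
    }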
|
|
||||||
#[test_case(2,
|
#[test_case(2,
|
||||||
@ -258,7 +252,7 @@ fn test_unify(
|
|||||||
let mut env = TestEnvironment::new();
|
let mut env = TestEnvironment::new();
|
||||||
let mut mapping = HashMap::new();
|
let mut mapping = HashMap::new();
|
||||||
for i in 1..=variable_count {
|
for i in 1..=variable_count {
|
||||||
let v = env.unifier.get_fresh_var();
|
let v = env.unifier.get_dummy_var();
|
||||||
mapping.insert(format!("v{}", i), v.0);
|
mapping.insert(format!("v{}", i), v.0);
|
||||||
}
|
}
|
||||||
// unification may have side effect when we do type resolution, so freeze the types
|
// unification may have side effect when we do type resolution, so freeze the types
|
||||||
@ -276,6 +270,7 @@ fn test_unify(
|
|||||||
println!("{} = {}", a, b);
|
println!("{} = {}", a, b);
|
||||||
let t1 = env.parse(a, &mapping);
|
let t1 = env.parse(a, &mapping);
|
||||||
let t2 = env.parse(b, &mapping);
|
let t2 = env.parse(b, &mapping);
|
||||||
|
println!("a = {}, b = {}", env.unifier.stringify(t1), env.unifier.stringify(t2));
|
||||||
assert!(env.unifier.eq(t1, t2));
|
assert!(env.unifier.eq(t1, t2));
|
||||||
}
|
}
|
||||||
}
|
}
|
@@ -286,7 +281,7 @@ fn test_unify(
 ("v1", "tuple[int]"),
 ("v2", "list[int]"),
 ],
-(("v1", "v2"), "Cannot unify list[0] with tuple[0]")
+(("v1", "v2"), "Incompatible types: list[0] and tuple[0]")
 ; "type mismatch"
 )]
 #[test_case(2,
@@ -294,7 +289,7 @@ fn test_unify(
 ("v1", "tuple[int]"),
 ("v2", "tuple[float]"),
 ],
-(("v1", "v2"), "Cannot unify 0 with 1")
+(("v1", "v2"), "Incompatible types: 0 and 1")
 ; "tuple parameter mismatch"
 )]
 #[test_case(2,
@@ -302,7 +297,7 @@ fn test_unify(
 ("v1", "tuple[int,int]"),
 ("v2", "tuple[int]"),
 ],
-(("v1", "v2"), "Cannot unify tuples with length 2 and 1")
+(("v1", "v2"), "Tuple length mismatch: got tuple[0, 0] and tuple[0]")
 ; "tuple length mismatch"
 )]
 #[test_case(3,
@@ -310,7 +305,7 @@ fn test_unify(
 ("v1", "Record[a=float,b=int]"),
 ("v2", "Foo[v3]"),
 ],
-(("v1", "v2"), "No such attribute b")
+(("v1", "v2"), "`3[var4]::b` field does not exist")
 ; "record obj merge"
 )]
 /// Test cases for invalid unifications.
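The expected strings in the test cases above show the new message shapes: "Incompatible types: X and Y", "Tuple length mismatch: got A and B", and "`ty::field` field does not exist". The exact formatting lives in the crate's error-display code; the sketch below only illustrates how such messages could be derived from the error kinds seen earlier in this diff, with variant and field names assumed for the example (the real enum carries Type handles that are stringified through the unifier rather than plain strings).

    // Assumed, simplified error kinds for illustration only.
    enum TypeErrorKind {
        IncompatibleTypes(String, String),
        TupleLengthMismatch(String, String),
        NoSuchField(String, String),
    }

    fn message(kind: &TypeErrorKind) -> String {
        match kind {
            TypeErrorKind::IncompatibleTypes(a, b) => {
                format!("Incompatible types: {} and {}", a, b)
            }
            TypeErrorKind::TupleLengthMismatch(a, b) => {
                format!("Tuple length mismatch: got {} and {}", a, b)
            }
            TypeErrorKind::NoSuchField(field, ty) => {
                format!("`{}::{}` field does not exist", ty, field)
            }
        }
    }

    fn main() {
        let kind = TypeErrorKind::NoSuchField("b".into(), "3[var4]".into());
        assert_eq!(message(&kind), "`3[var4]::b` field does not exist");
    }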
@ -322,7 +317,7 @@ fn test_invalid_unification(
|
|||||||
let mut env = TestEnvironment::new();
|
let mut env = TestEnvironment::new();
|
||||||
let mut mapping = HashMap::new();
|
let mut mapping = HashMap::new();
|
||||||
for i in 1..=variable_count {
|
for i in 1..=variable_count {
|
||||||
let v = env.unifier.get_fresh_var();
|
let v = env.unifier.get_dummy_var();
|
||||||
mapping.insert(format!("v{}", i), v.0);
|
mapping.insert(format!("v{}", i), v.0);
|
||||||
}
|
}
|
||||||
// unification may have side effect when we do type resolution, so freeze the types
|
// unification may have side effect when we do type resolution, so freeze the types
|
||||||
@ -338,7 +333,7 @@ fn test_invalid_unification(
|
|||||||
for (a, b) in pairs {
|
for (a, b) in pairs {
|
||||||
env.unifier.unify(a, b).unwrap();
|
env.unifier.unify(a, b).unwrap();
|
||||||
}
|
}
|
||||||
assert_eq!(env.unifier.unify(t1, t2), Err(errornous_pair.1.to_string()));
|
assert_eq!(env.unify(t1, t2), Err(errornous_pair.1.to_string()));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -348,16 +343,17 @@ fn test_recursive_subst() {
|
|||||||
let foo_id = *env.type_mapping.get("Foo").unwrap();
|
let foo_id = *env.type_mapping.get("Foo").unwrap();
|
||||||
let foo_ty = env.unifier.get_ty(foo_id);
|
let foo_ty = env.unifier.get_ty(foo_id);
|
||||||
let mapping: HashMap<_, _>;
|
let mapping: HashMap<_, _>;
|
||||||
if let TypeEnum::TObj { fields, params, .. } = &*foo_ty {
|
with_fields(&mut env.unifier, foo_id, |_unifier, fields| {
|
||||||
fields.borrow_mut().insert("rec".into(), (foo_id, true));
|
fields.insert("rec".into(), (foo_id, true));
|
||||||
mapping = params.borrow().iter().map(|(id, _)| (*id, int)).collect();
|
});
|
||||||
|
if let TypeEnum::TObj { params, .. } = &*foo_ty {
|
||||||
|
mapping = params.iter().map(|(id, _)| (*id, int)).collect();
|
||||||
} else {
|
} else {
|
||||||
unreachable!()
|
unreachable!()
|
||||||
}
|
}
|
||||||
let instantiated = env.unifier.subst(foo_id, &mapping).unwrap();
|
let instantiated = env.unifier.subst(foo_id, &mapping).unwrap();
|
||||||
let instantiated_ty = env.unifier.get_ty(instantiated);
|
let instantiated_ty = env.unifier.get_ty(instantiated);
|
||||||
if let TypeEnum::TObj { fields, .. } = &*instantiated_ty {
|
if let TypeEnum::TObj { fields, .. } = &*instantiated_ty {
|
||||||
let fields = fields.borrow();
|
|
||||||
assert!(env.unifier.unioned(fields.get(&"a".into()).unwrap().0, int));
|
assert!(env.unifier.unioned(fields.get(&"a".into()).unwrap().0, int));
|
||||||
assert!(env.unifier.unioned(fields.get(&"rec".into()).unwrap().0, instantiated));
|
assert!(env.unifier.unioned(fields.get(&"rec".into()).unwrap().0, instantiated));
|
||||||
} else {
|
} else {
|
||||||
@ -370,32 +366,31 @@ fn test_virtual() {
|
|||||||
let mut env = TestEnvironment::new();
|
let mut env = TestEnvironment::new();
|
||||||
let int = env.parse("int", &HashMap::new());
|
let int = env.parse("int", &HashMap::new());
|
||||||
let fun = env.unifier.add_ty(TypeEnum::TFunc(
|
let fun = env.unifier.add_ty(TypeEnum::TFunc(
|
||||||
FunSignature { args: vec![], ret: int, vars: HashMap::new() }.into(),
|
FunSignature { args: vec![], ret: int, vars: HashMap::new() },
|
||||||
));
|
));
|
||||||
let bar = env.unifier.add_ty(TypeEnum::TObj {
|
let bar = env.unifier.add_ty(TypeEnum::TObj {
|
||||||
obj_id: DefinitionId(5),
|
obj_id: DefinitionId(5),
|
||||||
fields: [("f".into(), (fun, false)), ("a".into(), (int, false))]
|
fields: [("f".into(), (fun, false)), ("a".into(), (int, false))]
|
||||||
.iter()
|
.iter()
|
||||||
.cloned()
|
.cloned()
|
||||||
.collect::<HashMap<StrRef, _>>()
|
.collect::<HashMap<StrRef, _>>(),
|
||||||
.into(),
|
params: HashMap::new(),
|
||||||
params: HashMap::new().into(),
|
|
||||||
});
|
});
|
||||||
let v0 = env.unifier.get_fresh_var().0;
|
let v0 = env.unifier.get_dummy_var().0;
|
||||||
let v1 = env.unifier.get_fresh_var().0;
|
let v1 = env.unifier.get_dummy_var().0;
|
||||||
|
|
||||||
let a = env.unifier.add_ty(TypeEnum::TVirtual { ty: bar });
|
let a = env.unifier.add_ty(TypeEnum::TVirtual { ty: bar });
|
||||||
let b = env.unifier.add_ty(TypeEnum::TVirtual { ty: v0 });
|
let b = env.unifier.add_ty(TypeEnum::TVirtual { ty: v0 });
|
||||||
let c = env.unifier.add_record([("f".into(), (v1, false))].iter().cloned().collect());
|
let c = env.unifier.add_record([("f".into(), RecordField::new(v1, false, None))].iter().cloned().collect());
|
||||||
env.unifier.unify(a, b).unwrap();
|
env.unifier.unify(a, b).unwrap();
|
||||||
env.unifier.unify(b, c).unwrap();
|
env.unifier.unify(b, c).unwrap();
|
||||||
assert!(env.unifier.eq(v1, fun));
|
assert!(env.unifier.eq(v1, fun));
|
||||||
|
|
||||||
let d = env.unifier.add_record([("a".into(), (v1, true))].iter().cloned().collect());
|
let d = env.unifier.add_record([("a".into(), RecordField::new(v1, true, None))].iter().cloned().collect());
|
||||||
assert_eq!(env.unifier.unify(b, d), Err("Cannot access field a for virtual type".to_string()));
|
assert_eq!(env.unify(b, d), Err("`virtual[5]::a` field does not exist".to_string()));
|
||||||
|
|
||||||
let d = env.unifier.add_record([("b".into(), (v1, true))].iter().cloned().collect());
|
let d = env.unifier.add_record([("b".into(), RecordField::new(v1, true, None))].iter().cloned().collect());
|
||||||
assert_eq!(env.unifier.unify(b, d), Err("No such attribute b".to_string()));
|
assert_eq!(env.unify(b, d), Err("`virtual[5]::b` field does not exist".to_string()));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -409,107 +404,107 @@ fn test_typevar_range() {
|
|||||||
|
|
||||||
// unification between v and int
|
// unification between v and int
|
||||||
// where v in (int, bool)
|
// where v in (int, bool)
|
||||||
let v = env.unifier.get_fresh_var_with_range(&[int, boolean]).0;
|
let v = env.unifier.get_fresh_var_with_range(&[int, boolean], None, None).0;
|
||||||
env.unifier.unify(int, v).unwrap();
|
env.unifier.unify(int, v).unwrap();
|
||||||
|
|
||||||
// unification between v and list[int]
|
// unification between v and list[int]
|
||||||
// where v in (int, bool)
|
// where v in (int, bool)
|
||||||
let v = env.unifier.get_fresh_var_with_range(&[int, boolean]).0;
|
let v = env.unifier.get_fresh_var_with_range(&[int, boolean], None, None).0;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
env.unifier.unify(int_list, v),
|
env.unify(int_list, v),
|
||||||
Err("Cannot unify variable 3 with list[0] due to incompatible value range".to_string())
|
Err("Expected any one of these types: 0, 2, but got list[0]".to_string())
|
||||||
);
|
);
|
||||||
|
|
||||||
// unification between v and float
|
// unification between v and float
|
||||||
// where v in (int, bool)
|
// where v in (int, bool)
|
||||||
let v = env.unifier.get_fresh_var_with_range(&[int, boolean]).0;
|
let v = env.unifier.get_fresh_var_with_range(&[int, boolean], None, None).0;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
env.unifier.unify(float, v),
|
env.unify(float, v),
|
||||||
Err("Cannot unify variable 4 with 1 due to incompatible value range".to_string())
|
Err("Expected any one of these types: 0, 2, but got 1".to_string())
|
||||||
);
|
);
|
||||||
|
|
||||||
let v1 = env.unifier.get_fresh_var_with_range(&[int, boolean]).0;
|
let v1 = env.unifier.get_fresh_var_with_range(&[int, boolean], None, None).0;
|
||||||
let v1_list = env.unifier.add_ty(TypeEnum::TList { ty: v1 });
|
let v1_list = env.unifier.add_ty(TypeEnum::TList { ty: v1 });
|
||||||
let v = env.unifier.get_fresh_var_with_range(&[int, v1_list]).0;
|
let v = env.unifier.get_fresh_var_with_range(&[int, v1_list], None, None).0;
|
||||||
// unification between v and int
|
// unification between v and int
|
||||||
// where v in (int, list[v1]), v1 in (int, bool)
|
// where v in (int, list[v1]), v1 in (int, bool)
|
||||||
env.unifier.unify(int, v).unwrap();
|
env.unifier.unify(int, v).unwrap();
|
||||||
|
|
||||||
let v = env.unifier.get_fresh_var_with_range(&[int, v1_list]).0;
|
let v = env.unifier.get_fresh_var_with_range(&[int, v1_list], None, None).0;
|
||||||
// unification between v and list[int]
|
// unification between v and list[int]
|
||||||
// where v in (int, list[v1]), v1 in (int, bool)
|
// where v in (int, list[v1]), v1 in (int, bool)
|
||||||
env.unifier.unify(int_list, v).unwrap();
|
env.unifier.unify(int_list, v).unwrap();
|
||||||
|
|
||||||
let v = env.unifier.get_fresh_var_with_range(&[int, v1_list]).0;
|
let v = env.unifier.get_fresh_var_with_range(&[int, v1_list], None, None).0;
|
||||||
// unification between v and list[float]
|
// unification between v and list[float]
|
||||||
// where v in (int, list[v1]), v1 in (int, bool)
|
// where v in (int, list[v1]), v1 in (int, bool)
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
env.unifier.unify(float_list, v),
|
env.unify(float_list, v),
|
||||||
Err("Cannot unify variable 8 with list[1] due to incompatible value range".to_string())
|
Err("Expected any one of these types: 0, list[var5], but got list[1]\n\nNotes:\n var5 ∈ {0, 2}".to_string())
|
||||||
);
|
);
|
||||||
|
|
||||||
let a = env.unifier.get_fresh_var_with_range(&[int, float]).0;
|
let a = env.unifier.get_fresh_var_with_range(&[int, float], None, None).0;
|
||||||
let b = env.unifier.get_fresh_var_with_range(&[boolean, float]).0;
|
let b = env.unifier.get_fresh_var_with_range(&[boolean, float], None, None).0;
|
||||||
env.unifier.unify(a, b).unwrap();
|
env.unifier.unify(a, b).unwrap();
|
||||||
env.unifier.unify(a, float).unwrap();
|
env.unifier.unify(a, float).unwrap();
|
||||||
|
|
||||||
let a = env.unifier.get_fresh_var_with_range(&[int, float]).0;
|
let a = env.unifier.get_fresh_var_with_range(&[int, float], None, None).0;
|
||||||
let b = env.unifier.get_fresh_var_with_range(&[boolean, float]).0;
|
let b = env.unifier.get_fresh_var_with_range(&[boolean, float], None, None).0;
|
||||||
env.unifier.unify(a, b).unwrap();
|
env.unifier.unify(a, b).unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
env.unifier.unify(a, int),
|
env.unify(a, int),
|
||||||
Err("Cannot unify variable 12 with 0 due to incompatible value range".into())
|
Err("Expected any one of these types: 1, but got 0".into())
|
||||||
);
|
);
|
||||||
|
|
||||||
let a = env.unifier.get_fresh_var_with_range(&[int, float]).0;
|
let a = env.unifier.get_fresh_var_with_range(&[int, float], None, None).0;
|
||||||
let b = env.unifier.get_fresh_var_with_range(&[boolean, float]).0;
|
let b = env.unifier.get_fresh_var_with_range(&[boolean, float], None, None).0;
|
||||||
let a_list = env.unifier.add_ty(TypeEnum::TList { ty: a });
|
let a_list = env.unifier.add_ty(TypeEnum::TList { ty: a });
|
||||||
let a_list = env.unifier.get_fresh_var_with_range(&[a_list]).0;
|
let a_list = env.unifier.get_fresh_var_with_range(&[a_list], None, None).0;
|
||||||
let b_list = env.unifier.add_ty(TypeEnum::TList { ty: b });
|
let b_list = env.unifier.add_ty(TypeEnum::TList { ty: b });
|
||||||
let b_list = env.unifier.get_fresh_var_with_range(&[b_list]).0;
|
let b_list = env.unifier.get_fresh_var_with_range(&[b_list], None, None).0;
|
||||||
env.unifier.unify(a_list, b_list).unwrap();
|
env.unifier.unify(a_list, b_list).unwrap();
|
||||||
let float_list = env.unifier.add_ty(TypeEnum::TList { ty: float });
|
let float_list = env.unifier.add_ty(TypeEnum::TList { ty: float });
|
||||||
env.unifier.unify(a_list, float_list).unwrap();
|
env.unifier.unify(a_list, float_list).unwrap();
|
||||||
// previous unifications should not affect a and b
|
// previous unifications should not affect a and b
|
||||||
env.unifier.unify(a, int).unwrap();
|
env.unifier.unify(a, int).unwrap();
|
||||||
|
|
||||||
let a = env.unifier.get_fresh_var_with_range(&[int, float]).0;
|
let a = env.unifier.get_fresh_var_with_range(&[int, float], None, None).0;
|
||||||
let b = env.unifier.get_fresh_var_with_range(&[boolean, float]).0;
|
let b = env.unifier.get_fresh_var_with_range(&[boolean, float], None, None).0;
|
||||||
let a_list = env.unifier.add_ty(TypeEnum::TList { ty: a });
|
let a_list = env.unifier.add_ty(TypeEnum::TList { ty: a });
|
||||||
let b_list = env.unifier.add_ty(TypeEnum::TList { ty: b });
|
let b_list = env.unifier.add_ty(TypeEnum::TList { ty: b });
|
||||||
env.unifier.unify(a_list, b_list).unwrap();
|
env.unifier.unify(a_list, b_list).unwrap();
|
||||||
let int_list = env.unifier.add_ty(TypeEnum::TList { ty: int });
|
let int_list = env.unifier.add_ty(TypeEnum::TList { ty: int });
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
env.unifier.unify(a_list, int_list),
|
env.unify(a_list, int_list),
|
||||||
Err("Cannot unify variable 19 with 0 due to incompatible value range".into())
|
Err("Expected any one of these types: 1, but got 0".into())
|
||||||
);
|
);
|
||||||
|
|
||||||
let a = env.unifier.get_fresh_var_with_range(&[int, float]).0;
|
let a = env.unifier.get_fresh_var_with_range(&[int, float], None, None).0;
|
||||||
let b = env.unifier.get_fresh_var().0;
|
let b = env.unifier.get_dummy_var().0;
|
||||||
let a_list = env.unifier.add_ty(TypeEnum::TList { ty: a });
|
let a_list = env.unifier.add_ty(TypeEnum::TList { ty: a });
|
||||||
let a_list = env.unifier.get_fresh_var_with_range(&[a_list]).0;
|
let a_list = env.unifier.get_fresh_var_with_range(&[a_list], None, None).0;
|
||||||
let b_list = env.unifier.add_ty(TypeEnum::TList { ty: b });
|
let b_list = env.unifier.add_ty(TypeEnum::TList { ty: b });
|
||||||
env.unifier.unify(a_list, b_list).unwrap();
|
env.unifier.unify(a_list, b_list).unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
env.unifier.unify(b, boolean),
|
env.unify(b, boolean),
|
||||||
Err("Cannot unify variable 21 with 2 due to incompatible value range".into())
|
Err("Expected any one of these types: 0, 1, but got 2".into())
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_rigid_var() {
|
fn test_rigid_var() {
|
||||||
let mut env = TestEnvironment::new();
|
let mut env = TestEnvironment::new();
|
||||||
let a = env.unifier.get_fresh_rigid_var().0;
|
let a = env.unifier.get_fresh_rigid_var(None, None).0;
|
||||||
let b = env.unifier.get_fresh_rigid_var().0;
|
let b = env.unifier.get_fresh_rigid_var(None, None).0;
|
||||||
let x = env.unifier.get_fresh_var().0;
|
let x = env.unifier.get_dummy_var().0;
|
||||||
let list_a = env.unifier.add_ty(TypeEnum::TList { ty: a });
|
let list_a = env.unifier.add_ty(TypeEnum::TList { ty: a });
|
||||||
let list_x = env.unifier.add_ty(TypeEnum::TList { ty: x });
|
let list_x = env.unifier.add_ty(TypeEnum::TList { ty: x });
|
||||||
let int = env.parse("int", &HashMap::new());
|
let int = env.parse("int", &HashMap::new());
|
||||||
let list_int = env.parse("list[int]", &HashMap::new());
|
let list_int = env.parse("list[int]", &HashMap::new());
|
||||||
|
|
||||||
assert_eq!(env.unifier.unify(a, b), Err("Cannot unify var3 with var2".to_string()));
|
assert_eq!(env.unify(a, b), Err("Incompatible types: var3 and var2".to_string()));
|
||||||
env.unifier.unify(list_a, list_x).unwrap();
|
env.unifier.unify(list_a, list_x).unwrap();
|
||||||
assert_eq!(env.unifier.unify(list_x, list_int), Err("Cannot unify 0 with var2".to_string()));
|
assert_eq!(env.unify(list_x, list_int), Err("Incompatible types: 0 and var2".to_string()));
|
||||||
|
|
||||||
env.unifier.replace_rigid_var(a, int);
|
env.unifier.replace_rigid_var(a, int);
|
||||||
env.unifier.unify(list_x, list_int).unwrap();
|
env.unifier.unify(list_x, list_int).unwrap();
|
||||||
@ -526,13 +521,13 @@ fn test_instantiation() {
|
|||||||
let obj_map: HashMap<_, _> =
|
let obj_map: HashMap<_, _> =
|
||||||
[(0usize, "int"), (1, "float"), (2, "bool")].iter().cloned().collect();
|
[(0usize, "int"), (1, "float"), (2, "bool")].iter().cloned().collect();
|
||||||
|
|
||||||
let v = env.unifier.get_fresh_var_with_range(&[int, boolean]).0;
|
let v = env.unifier.get_fresh_var_with_range(&[int, boolean], None, None).0;
|
||||||
let list_v = env.unifier.add_ty(TypeEnum::TList { ty: v });
|
let list_v = env.unifier.add_ty(TypeEnum::TList { ty: v });
|
||||||
let v1 = env.unifier.get_fresh_var_with_range(&[list_v, int]).0;
|
let v1 = env.unifier.get_fresh_var_with_range(&[list_v, int], None, None).0;
|
||||||
let v2 = env.unifier.get_fresh_var_with_range(&[list_int, float]).0;
|
let v2 = env.unifier.get_fresh_var_with_range(&[list_int, float], None, None).0;
|
||||||
let t = env.unifier.get_fresh_rigid_var().0;
|
let t = env.unifier.get_dummy_var().0;
|
||||||
let tuple = env.unifier.add_ty(TypeEnum::TTuple { ty: vec![v, v1, v2] });
|
let tuple = env.unifier.add_ty(TypeEnum::TTuple { ty: vec![v, v1, v2] });
|
||||||
let v3 = env.unifier.get_fresh_var_with_range(&[tuple, t]).0;
|
let v3 = env.unifier.get_fresh_var_with_range(&[tuple, t], None, None).0;
|
||||||
// t = TypeVar('t')
|
// t = TypeVar('t')
|
||||||
// v = TypeVar('v', int, bool)
|
// v = TypeVar('v', int, bool)
|
||||||
// v1 = TypeVar('v1', 'list[v]', int)
|
// v1 = TypeVar('v1', 'list[v]', int)
|
||||||
@ -561,9 +556,9 @@ fn test_instantiation() {
|
|||||||
let types = types
|
let types = types
|
||||||
.iter()
|
.iter()
|
||||||
.map(|ty| {
|
.map(|ty| {
|
||||||
env.unifier.stringify(*ty, &mut |i| obj_map.get(&i).unwrap().to_string(), &mut |i| {
|
env.unifier.internal_stringify(*ty, &mut |i| obj_map.get(&i).unwrap().to_string(), &mut |i| {
|
||||||
format!("v{}", i)
|
format!("v{}", i)
|
||||||
})
|
}, &mut None)
|
||||||
})
|
})
|
||||||
.sorted()
|
.sorted()
|
||||||
.collect_vec();
|
.collect_vec();
|
||||||
@@ -46,6 +46,17 @@ impl<V> UnificationTable<V> {
 }
 }
+
+pub fn probe_value_immutable(&self, key: UnificationKey) -> &V {
+let mut root = key.0;
+let mut parent = self.parents[root];
+while root != parent {
+root = parent;
+// parent = root.parent
+parent = self.parents[parent];
+}
+self.values[parent].as_ref().unwrap()
+}

 pub fn probe_value(&mut self, a: UnificationKey) -> &V {
 let index = self.find(a);
 self.values[index].as_ref().unwrap()
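`probe_value_immutable` exists so the printer can resolve a key through `&self`: it follows parent pointers to the representative without the path compression that `find`/`probe_value` perform through `&mut self`, trading a slightly longer walk for a read-only borrow. A standalone union-find sketch showing both variants; the struct and `find` are hypothetical, but the read-only loop mirrors the one added above:

    struct UnificationTable<V> {
        parents: Vec<usize>,
        values: Vec<Option<V>>,
    }

    impl<V> UnificationTable<V> {
        // Mutable find: walks to the root and compresses the path on the way.
        fn find(&mut self, key: usize) -> usize {
            let mut root = key;
            while self.parents[root] != root {
                root = self.parents[root];
            }
            // Path compression: point every node on the path directly at the root.
            let mut cur = key;
            while self.parents[cur] != root {
                let next = self.parents[cur];
                self.parents[cur] = root;
                cur = next;
            }
            root
        }

        // Read-only variant: same walk, no compression, so it works through &self.
        fn probe_value_immutable(&self, key: usize) -> &V {
            let mut root = key;
            let mut parent = self.parents[root];
            while root != parent {
                root = parent;
                parent = self.parents[parent];
            }
            self.values[parent].as_ref().unwrap()
        }

        fn probe_value(&mut self, key: usize) -> &V {
            let index = self.find(key);
            self.values[index].as_ref().unwrap()
        }
    }

    fn main() {
        let mut table = UnificationTable {
            parents: vec![0, 0, 1], // 2 -> 1 -> 0
            values: vec![Some("root value"), None, None],
        };
        assert_eq!(*table.probe_value_immutable(2), "root value");
        assert_eq!(*table.probe_value(2), "root value");
    }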
@@ -63,8 +63,8 @@ impl SymbolResolver for Resolver {
 unimplemented!()
 }

-fn get_identifier_def(&self, id: StrRef) -> Option<DefinitionId> {
+fn get_identifier_def(&self, id: StrRef) -> Result<DefinitionId, String> {
-self.0.id_to_def.lock().get(&id).cloned()
+self.0.id_to_def.lock().get(&id).cloned().ok_or_else(|| "Undefined identifier".to_string())
 }

 fn get_string_id(&self, s: &str) -> i32 {
@@ -86,7 +86,7 @@ fn main() {
 get_type_from_type_annotation_kinds(def_list, unifier, primitives, &ty)
 })
 .collect::<Result<Vec<_>, _>>()?;
-Ok(unifier.get_fresh_var_with_range(&constraints).0)
+Ok(unifier.get_fresh_var_with_range(&constraints, None, None).0)
 } else {
 Err(format!("expression {:?} cannot be handled as a TypeVar in global scope", var))
 }
@@ -219,7 +219,7 @@ fn main() {
 let mut instance =
 defs[resolver
 .get_identifier_def("run".into())
-.unwrap_or_else(|| panic!("cannot find run() entry point")).0
+.unwrap_or_else(|_| panic!("cannot find run() entry point")).0
 ].write();
 if let TopLevelDef::Function {
 instance_to_stmt,