forked from M-Labs/nac3

meta: Refactor to use more let-else bindings

parent 5bf05c6a69
commit a19f1065e3
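For reference, the pattern applied throughout this commit replaces an `if let PATTERN = EXPR { ... } else { unreachable!() }` wrapper with a `let PATTERN = EXPR else { unreachable!() };` binding (stable since Rust 1.65), which drops one level of nesting around the happy path. A minimal, self-contained sketch of the before/after shapes; the `Node` enum and the two functions are invented for illustration and are not part of nac3:

// Sketch only: the enum and functions below are invented and do not appear in nac3.
enum Node {
    With { body: Vec<String> },
    Other,
}

// Before the refactor: the happy path is nested inside `if let`,
// and the else arm only panics.
fn gen_before(node: &Node) -> usize {
    if let Node::With { body } = node {
        body.len()
    } else {
        unreachable!()
    }
}

// After the refactor: `let ... else` binds the pattern up front; the else
// block must diverge, so the rest of the function loses one level of nesting.
fn gen_after(node: &Node) -> usize {
    let Node::With { body } = node else {
        unreachable!()
    };
    body.len()
}

fn main() {
    let node = Node::With { body: vec!["stmt".into()] };
    assert_eq!(gen_before(&node), gen_after(&node));
}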
@@ -215,148 +215,148 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
         ctx: &mut CodeGenContext<'_, '_>,
         stmt: &Stmt<Option<Type>>,
     ) -> Result<(), String> {
-        if let StmtKind::With { items, body, .. } = &stmt.node {
-            if items.len() == 1 && items[0].optional_vars.is_none() {
-                let item = &items[0];
+        let StmtKind::With { items, body, .. } = &stmt.node else {
+            unreachable!()
+        };
+
+        if items.len() == 1 && items[0].optional_vars.is_none() {
+            let item = &items[0];
 
             // Behavior of parallel and sequential:
             // Each function call (indirectly, can be inside a sequential block) within a parallel
             // block will update the end variable to the maximum now_mu in the block.
             // Each function call directly inside a parallel block will reset the timeline after
             // execution. A parallel block within a sequential block (or not within any block) will
             // set the timeline to the max now_mu within the block (and the outer max now_mu will also
             // be updated).
             //
             // Implementation: We track the start and end separately.
             // - If there is a start variable, it indicates that we are directly inside a
             // parallel block and we have to reset the timeline after every function call.
             // - If there is a end variable, it indicates that we are (indirectly) inside a
             // parallel block, and we should update the max end value.
             if let ExprKind::Name { id, ctx: name_ctx } = &item.context_expr.node {
                 if id == &"parallel".into() || id == &"legacy_parallel".into() {
                     ...
                     return Ok(());
                 } else if id == &"sequential".into() {
                     ...
                     return Ok(());
                 }
             }
         }
-            // not parallel/sequential
-            gen_with(self, ctx, stmt)
-        } else {
-            unreachable!()
-        }
+
+        // not parallel/sequential
+        gen_with(self, ctx, stmt)
     }
@@ -533,14 +533,13 @@ impl Nac3 {
         let instance = {
             let defs = top_level.definitions.read();
             let mut definition = defs[def_id.0].write();
-            if let TopLevelDef::Function { instance_to_stmt, instance_to_symbol, .. } =
-                &mut *definition
-            {
-                instance_to_symbol.insert(String::new(), "__modinit__".into());
-                instance_to_stmt[""].clone()
-            } else {
-                unreachable!()
-            }
+            let TopLevelDef::Function { instance_to_stmt, instance_to_symbol, .. } =
+                &mut *definition else {
+                unreachable!()
+            };
+
+            instance_to_symbol.insert(String::new(), "__modinit__".into());
+            instance_to_stmt[""].clone()
         };
 
         let task = CodeGenTask {
@@ -311,37 +311,37 @@ impl InnerResolver {
             unreachable!("none cannot be typeid")
         } else if let Some(def_id) = self.pyid_to_def.read().get(&ty_id).copied() {
             let def = defs[def_id.0].read();
-            if let TopLevelDef::Class { object_id, type_vars, fields, methods, .. } = &*def {
-                // do not handle type var param and concrete check here, and no subst
-                Ok(Ok({
+            let TopLevelDef::Class { object_id, type_vars, fields, methods, .. } = &*def else {
+                // only object is supported, functions are not supported
+                unreachable!("function type is not supported, should not be queried")
+            };
+
+            // do not handle type var param and concrete check here, and no subst
+            Ok(Ok({
                 let ty = TypeEnum::TObj {
                     obj_id: *object_id,
                     params: type_vars
                         .iter()
                         .map(|x| {
-                            if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*x) {
-                                (*id, *x)
-                            } else {
-                                unreachable!()
-                            }
+                            let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*x) else {
+                                unreachable!()
+                            };
+
+                            (*id, *x)
                         })
                         .collect(),
                     fields: {
                         let mut res = methods
                             .iter()
                             .map(|(iden, ty, _)| (*iden, (*ty, false)))
                             .collect::<HashMap<_, _>>();
                         res.extend(fields.clone().into_iter().map(|x| (x.0, (x.1, x.2))));
                         res
                     },
                 };
                 // here also false, later instantiation use python object to check compatible
                 (unifier.add_ty(ty), false)
             }))
-            } else {
-                // only object is supported, functions are not supported
-                unreachable!("function type is not supported, should not be queried")
-            }
         } else if ty_ty_id == self.primitive_ids.typevar {
             let name: &str = pyty.getattr("__name__").unwrap().extract().unwrap();
             let (constraint_types, is_const_generic) = {
@@ -652,23 +652,23 @@ impl InnerResolver {
         // if is `none`
         let zelf_id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
         if zelf_id == self.primitive_ids.none {
-            if let TypeEnum::TObj { params, .. } =
-                unifier.get_ty_immutable(primitives.option).as_ref()
-            {
-                let var_map = params
-                    .iter()
-                    .map(|(id_var, ty)| {
-                        if let TypeEnum::TVar { id, range, name, loc, .. } = &*unifier.get_ty(*ty) {
-                            assert_eq!(*id, *id_var);
-                            (*id, unifier.get_fresh_var_with_range(range, *name, *loc).0)
-                        } else {
-                            unreachable!()
-                        }
-                    })
-                    .collect::<HashMap<_, _>>();
-                return Ok(Ok(unifier.subst(primitives.option, &var_map).unwrap()))
-            }
-            unreachable!("must be tobj")
+            let ty_enum = unifier.get_ty_immutable(primitives.option);
+            let TypeEnum::TObj { params, .. } = ty_enum.as_ref() else {
+                unreachable!("must be tobj")
+            };
+
+            let var_map = params
+                .iter()
+                .map(|(id_var, ty)| {
+                    let TypeEnum::TVar { id, range, name, loc, .. } = &*unifier.get_ty(*ty) else {
+                        unreachable!()
+                    };
+
+                    assert_eq!(*id, *id_var);
+                    (*id, unifier.get_fresh_var_with_range(range, *name, *loc).0)
+                })
+                .collect::<HashMap<_, _>>();
+            return Ok(Ok(unifier.subst(primitives.option, &var_map).unwrap()))
         }
 
         let ty = match self.get_obj_type(py, field_data, unifier, defs, primitives)? {
@@ -688,14 +688,13 @@ impl InnerResolver {
                 let var_map = params
                     .iter()
                     .map(|(id_var, ty)| {
-                        if let TypeEnum::TVar { id, range, name, loc, .. } =
-                            &*unifier.get_ty(*ty)
-                        {
-                            assert_eq!(*id, *id_var);
-                            (*id, unifier.get_fresh_var_with_range(range, *name, *loc).0)
-                        } else {
-                            unreachable!()
-                        }
+                        let TypeEnum::TVar { id, range, name, loc, .. } =
+                            &*unifier.get_ty(*ty) else {
+                            unreachable!()
+                        };
+
+                        assert_eq!(*id, *id_var);
+                        (*id, unifier.get_fresh_var_with_range(range, *name, *loc).0)
                     })
                     .collect::<HashMap<_, _>>();
                 let mut instantiate_obj = || {
@@ -900,28 +899,29 @@ impl InnerResolver {
             Ok(Some(global.as_pointer_value().into()))
         } else if ty_id == self.primitive_ids.tuple {
-            if let TypeEnum::TTuple { ty } = ctx.unifier.get_ty_immutable(expected_ty).as_ref() {
-                let tup_tys = ty.iter();
-                let elements: &PyTuple = obj.downcast()?;
-                assert_eq!(elements.len(), tup_tys.len());
+            let expected_ty_enum = ctx.unifier.get_ty_immutable(expected_ty);
+            let TypeEnum::TTuple { ty } = expected_ty_enum.as_ref() else {
+                unreachable!()
+            };
+
+            let tup_tys = ty.iter();
+            let elements: &PyTuple = obj.downcast()?;
+            assert_eq!(elements.len(), tup_tys.len());
             let val: Result<Option<Vec<_>>, _> =
                 elements
                     .iter()
                     .enumerate()
                     .zip(tup_tys)
                     .map(|((i, elem), ty)| self
                         .get_obj_value(py, elem, ctx, generator, *ty).map_err(|e|
                             super::CompileError::new_err(
                                 format!("Error getting element {i}: {e}")
                             )
                         )
                     ).collect();
-                let val = val?.unwrap();
-                let val = ctx.ctx.const_struct(&val, false);
-                Ok(Some(val.into()))
-            } else {
-                unreachable!("must expect tuple type")
-            }
+            let val = val?.unwrap();
+            let val = ctx.ctx.const_struct(&val, false);
+            Ok(Some(val.into()))
         } else if ty_id == self.primitive_ids.option {
             let option_val_ty = match ctx.unifier.get_ty_immutable(expected_ty).as_ref() {
                 TypeEnum::TObj { obj_id, params, .. }
@@ -993,27 +993,25 @@ impl InnerResolver {
             // should be classes
             let definition =
                 top_level_defs.get(self.pyid_to_def.read().get(&ty_id).unwrap().0).unwrap().read();
-            if let TopLevelDef::Class { fields, .. } = &*definition {
-                let values: Result<Option<Vec<_>>, _> = fields
+            let TopLevelDef::Class { fields, .. } = &*definition else { unreachable!() };
+
+            let values: Result<Option<Vec<_>>, _> = fields
                 .iter()
                 .map(|(name, ty, _)| {
                     self.get_obj_value(py, obj.getattr(name.to_string().as_str())?, ctx, generator, *ty)
                         .map_err(|e| super::CompileError::new_err(format!("Error getting field {name}: {e}")))
                 })
                 .collect();
             let values = values?;
             if let Some(values) = values {
                 let val = ty.const_named_struct(&values);
                 let global = ctx.module.get_global(&id_str).unwrap_or_else(|| {
                     ctx.module.add_global(ty, Some(AddressSpace::default()), &id_str)
                 });
                 global.set_initializer(&val);
                 Ok(Some(global.as_pointer_value().into()))
             } else {
                 Ok(None)
             }
-            } else {
-                unreachable!()
-            }
         }
     }
@@ -1065,27 +1063,26 @@ impl InnerResolver {
 impl SymbolResolver for Resolver {
     fn get_default_param_value(&self, expr: &ast::Expr) -> Option<SymbolValue> {
-        match &expr.node {
-            ast::ExprKind::Name { id, .. } => {
-                Python::with_gil(|py| -> PyResult<Option<SymbolValue>> {
+        let ast::ExprKind::Name { id, .. } = &expr.node else {
+            unreachable!("only for resolving names")
+        };
+
+        Python::with_gil(|py| -> PyResult<Option<SymbolValue>> {
             let obj: &PyAny = self.0.module.extract(py)?;
             let members: &PyDict = obj.getattr("__dict__").unwrap().downcast().unwrap();
             let mut sym_value = None;
             for (key, val) in members {
                 let key: &str = key.extract()?;
                 if key == id.to_string() {
                     if let Ok(Ok(v)) = self.0.get_default_param_obj_value(py, val) {
                         sym_value = Some(v);
                     }
                     break;
                 }
             }
             Ok(sym_value)
-                })
-                .unwrap()
-            }
-            _ => unreachable!("only for resolving names"),
-        }
+        }).unwrap()
     }
 
     fn get_symbol_type(
@@ -29,29 +29,29 @@ impl TimeFns for NowPinningTimeFns64 {
         let now_hiptr =
             ctx.builder.build_bitcast(now, i32_type.ptr_type(AddressSpace::default()), "now.hi.addr");
 
-        if let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr {
-            let now_loptr = unsafe {
-                ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(2, false)], "now.lo.addr")
-            };
-            if let (BasicValueEnum::IntValue(now_hi), BasicValueEnum::IntValue(now_lo)) = (
-                ctx.builder.build_load(now_hiptr, "now.hi"),
-                ctx.builder.build_load(now_loptr, "now.lo"),
-            ) {
+        let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr else {
+            unreachable!()
+        };
+
+        let now_loptr = unsafe {
+            ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(2, false)], "now.lo.addr")
+        };
+
+        let (BasicValueEnum::IntValue(now_hi), BasicValueEnum::IntValue(now_lo)) = (
+            ctx.builder.build_load(now_hiptr, "now.hi"),
+            ctx.builder.build_load(now_loptr, "now.lo"),
+        ) else {
+            unreachable!()
+        };
             ...
-                ctx.builder.build_or(shifted_hi, zext_lo, "now_mu").into()
-            } else {
-                unreachable!();
-            }
-        } else {
-            unreachable!();
-        }
+        ctx.builder.build_or(shifted_hi, zext_lo, "now_mu").into()
     }
 
     fn emit_at_mu<'ctx>(&self, ctx: &mut CodeGenContext<'ctx, '_>, t: BasicValueEnum<'ctx>) {
@@ -59,41 +59,41 @@ impl TimeFns for NowPinningTimeFns64 {
         let i64_type = ctx.ctx.i64_type();
 
         let i64_32 = i64_type.const_int(32, false);
-        if let BasicValueEnum::IntValue(time) = t {
-            let time_hi = ctx.builder.build_int_truncate(
-                ctx.builder.build_right_shift(time, i64_32, false, "time.hi"),
-                i32_type,
-                "",
-            );
+        let BasicValueEnum::IntValue(time) = t else {
+            unreachable!()
+        };
+
+        let time_hi = ctx.builder.build_int_truncate(
+            ctx.builder.build_right_shift(time, i64_32, false, "time.hi"),
+            i32_type,
+            "",
+        );
             ...
-            if let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr {
-                let now_loptr = unsafe {
-                    ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(2, false)], "now.lo.addr")
-                };
+        let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr else {
+            unreachable!()
+        };
+
+        let now_loptr = unsafe {
+            ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(2, false)], "now.lo.addr")
+        };
             ...
-            } else {
-                unreachable!();
-            }
-        } else {
-            unreachable!();
-        }
     }
 
     fn emit_delay_mu<'ctx>(
@@ -110,56 +110,56 @@ impl TimeFns for NowPinningTimeFns64 {
         let now_hiptr =
             ctx.builder.build_bitcast(now, i32_type.ptr_type(AddressSpace::default()), "now.hi.addr");
 
-        if let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr {
-            let now_loptr = unsafe {
-                ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(2, false)], "now.lo.addr")
-            };
-            if let (
-                BasicValueEnum::IntValue(now_hi),
-                BasicValueEnum::IntValue(now_lo),
-                BasicValueEnum::IntValue(dt),
-            ) = (
-                ctx.builder.build_load(now_hiptr, "now.hi"),
-                ctx.builder.build_load(now_loptr, "now.lo"),
-                dt,
-            ) {
+        let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr else {
+            unreachable!()
+        };
+
+        let now_loptr = unsafe {
+            ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(2, false)], "now.lo.addr")
+        };
+
+        let (
+            BasicValueEnum::IntValue(now_hi),
+            BasicValueEnum::IntValue(now_lo),
+            BasicValueEnum::IntValue(dt),
+        ) = (
+            ctx.builder.build_load(now_hiptr, "now.hi"),
+            ctx.builder.build_load(now_loptr, "now.lo"),
+            dt,
+        ) else {
+            unreachable!()
+        };
             ...
-            } else {
-                unreachable!();
-            }
-        } else {
-            unreachable!();
-        };
     }
@@ -176,14 +176,14 @@ impl TimeFns for NowPinningTimeFns {
             .unwrap_or_else(|| ctx.module.add_global(i64_type, None, "now"));
         let now_raw = ctx.builder.build_load(now.as_pointer_value(), "now");
 
-        if let BasicValueEnum::IntValue(now_raw) = now_raw {
-            let i64_32 = i64_type.const_int(32, false);
-            let now_lo = ctx.builder.build_left_shift(now_raw, i64_32, "now.lo");
-            let now_hi = ctx.builder.build_right_shift(now_raw, i64_32, false, "now.hi");
-            ctx.builder.build_or(now_lo, now_hi, "now_mu").into()
-        } else {
-            unreachable!();
-        }
+        let BasicValueEnum::IntValue(now_raw) = now_raw else {
+            unreachable!()
+        };
+
+        let i64_32 = i64_type.const_int(32, false);
+        let now_lo = ctx.builder.build_left_shift(now_raw, i64_32, "now.lo");
+        let now_hi = ctx.builder.build_right_shift(now_raw, i64_32, false, "now.hi");
+        ctx.builder.build_or(now_lo, now_hi, "now_mu").into()
     }
 
     fn emit_at_mu<'ctx>(&self, ctx: &mut CodeGenContext<'ctx, '_>, t: BasicValueEnum<'ctx>) {
@@ -191,41 +191,41 @@ impl TimeFns for NowPinningTimeFns {
         let i64_type = ctx.ctx.i64_type();
         let i64_32 = i64_type.const_int(32, false);
 
-        if let BasicValueEnum::IntValue(time) = t {
-            let time_hi = ctx.builder.build_int_truncate(
-                ctx.builder.build_right_shift(time, i64_32, false, ""),
-                i32_type,
-                "time.hi",
-            );
+        let BasicValueEnum::IntValue(time) = t else {
+            unreachable!()
+        };
+
+        let time_hi = ctx.builder.build_int_truncate(
+            ctx.builder.build_right_shift(time, i64_32, false, ""),
+            i32_type,
+            "time.hi",
+        );
             ...
-            if let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr {
-                let now_loptr = unsafe {
-                    ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(1, false)], "now.lo.addr")
-                };
+        let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr else {
+            unreachable!()
+        };
+
+        let now_loptr = unsafe {
+            ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(1, false)], "now.lo.addr")
+        };
             ...
-            } else {
-                unreachable!();
-            }
-        } else {
-            unreachable!();
-        }
     }
 
     fn emit_delay_mu<'ctx>(
@@ -242,41 +242,41 @@ impl TimeFns for NowPinningTimeFns {
             .unwrap_or_else(|| ctx.module.add_global(i64_type, None, "now"));
         let now_raw = ctx.builder.build_load(now.as_pointer_value(), "");
 
-        if let (BasicValueEnum::IntValue(now_raw), BasicValueEnum::IntValue(dt)) = (now_raw, dt) {
-            let now_lo = ctx.builder.build_left_shift(now_raw, i64_32, "now.lo");
-            let now_hi = ctx.builder.build_right_shift(now_raw, i64_32, false, "now.hi");
+        let (BasicValueEnum::IntValue(now_raw), BasicValueEnum::IntValue(dt)) = (now_raw, dt) else {
+            unreachable!()
+        };
+
+        let now_lo = ctx.builder.build_left_shift(now_raw, i64_32, "now.lo");
+        let now_hi = ctx.builder.build_right_shift(now_raw, i64_32, false, "now.hi");
             ...
-            if let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr {
-                let now_loptr = unsafe {
-                    ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(1, false)], "now.lo.addr")
-                };
+        let BasicValueEnum::PointerValue(now_hiptr) = now_hiptr else {
+            unreachable!()
+        };
+
+        let now_loptr = unsafe {
+            ctx.builder.build_gep(now_hiptr, &[i32_type.const_int(1, false)], "now.lo.addr")
+        };
             ...
-            } else {
-                unreachable!();
-            }
-        } else {
-            unreachable!();
-        }
     }
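The TimeFns hunks above also use `let ... else` with a tuple pattern, so several `BasicValueEnum` operands are destructured in one binding whose else block must diverge. A small self-contained sketch of that shape, using an invented `Value` enum rather than inkwell's types:

// Sketch only: `Value` and `combine` are invented for illustration.
enum Value {
    Int(i64),
    Ptr(usize),
}

// Combine a high and a low word, analogous to how the emit_now_mu/emit_delay_mu
// hunks combine the loaded `now.hi`/`now.lo` values.
fn combine(hi: Value, lo: Value) -> i64 {
    // Both operands are matched at once; any non-Int operand falls into the
    // diverging else block, mirroring the `unreachable!()` in the diff.
    let (Value::Int(hi), Value::Int(lo)) = (hi, lo) else {
        unreachable!()
    };
    (hi << 32) | lo
}

fn main() {
    assert_eq!(combine(Value::Int(1), Value::Int(2)), (1_i64 << 32) | 2);
}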
@@ -39,11 +39,10 @@ pub fn get_subst_key(
 ) -> String {
     let mut vars = obj
         .map(|ty| {
-            if let TypeEnum::TObj { params, .. } = &*unifier.get_ty(ty) {
-                params.clone()
-            } else {
-                unreachable!()
-            }
+            let TypeEnum::TObj { params, .. } = &*unifier.get_ty(ty) else {
+                unreachable!()
+            };
+            params.clone()
         })
         .unwrap_or_default();
     vars.extend(fun_vars.iter());
@@ -224,7 +223,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
                 {
                     self.ctx.i64_type()
                 } else {
-                    unreachable!();
+                    unreachable!()
                 };
                 Some(ty.const_int(*val as u64, false).into())
             }
@@ -599,28 +598,27 @@ pub fn gen_constructor<'ctx, 'a, G: CodeGenerator>(
     def: &TopLevelDef,
     params: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
 ) -> Result<BasicValueEnum<'ctx>, String> {
-    match def {
-        TopLevelDef::Class { methods, .. } => {
-            // TODO: what about other fields that require alloca?
-            let fun_id = methods.iter().find(|method| method.0 == "__init__".into()).map(|method| method.2);
-            let ty = ctx.get_llvm_type(generator, signature.ret).into_pointer_type();
-            let zelf_ty: BasicTypeEnum = ty.get_element_type().try_into().unwrap();
-            let zelf: BasicValueEnum<'ctx> = ctx.builder.build_alloca(zelf_ty, "alloca").into();
-            // call `__init__` if there is one
-            if let Some(fun_id) = fun_id {
+    let TopLevelDef::Class { methods, .. } = def else {
+        unreachable!()
+    };
+
+    // TODO: what about other fields that require alloca?
+    let fun_id = methods.iter().find(|method| method.0 == "__init__".into()).map(|method| method.2);
+    let ty = ctx.get_llvm_type(generator, signature.ret).into_pointer_type();
+    let zelf_ty: BasicTypeEnum = ty.get_element_type().try_into().unwrap();
+    let zelf: BasicValueEnum<'ctx> = ctx.builder.build_alloca(zelf_ty, "alloca").into();
+    // call `__init__` if there is one
+    if let Some(fun_id) = fun_id {
+        let mut sign = signature.clone();
+        sign.ret = ctx.primitives.none;
+        generator.gen_call(
+            ctx,
+            Some((signature.ret, zelf.into())),
+            (&sign, fun_id),
+            params,
+        )?;
+    }
 
-            }
-            Ok(zelf)
-        }
-        TopLevelDef::Function { .. } => unreachable!(),
-    }
+    Ok(zelf)
 }
@@ -630,74 +628,71 @@ pub fn gen_func_instance<'ctx>(
     fun: (&FunSignature, &mut TopLevelDef, String),
     id: usize,
 ) -> Result<String, String> {
-    if let (
-        sign,
-        TopLevelDef::Function {
-            name, instance_to_symbol, instance_to_stmt, var_id, resolver, ..
-        },
-        key,
-    ) = fun
-    {
-        if let Some(sym) = instance_to_symbol.get(&key) {
-            return Ok(sym.clone());
-        }
+    let (
+        sign,
+        TopLevelDef::Function {
+            name, instance_to_symbol, instance_to_stmt, var_id, resolver, ..
+        },
+        key,
+    ) = fun else { unreachable!() };
+
+    if let Some(sym) = instance_to_symbol.get(&key) {
+        return Ok(sym.clone());
+    }
     ...
     if let Some(obj) = &obj {
         let zelf =
             store.from_unifier_type(&mut ctx.unifier, &ctx.primitives, obj.0, &mut cache);
-            if let ConcreteTypeEnum::TFunc { args, .. } = &mut signature {
-                args.insert(
-                    0,
-                    ConcreteFuncArg { name: "self".into(), ty: zelf, default_value: None },
-                );
-            } else {
-                unreachable!()
-            }
-        }
+        let ConcreteTypeEnum::TFunc { args, .. } = &mut signature else {
+            unreachable!()
+        };
+
+        args.insert(
+            0,
+            ConcreteFuncArg { name: "self".into(), ty: zelf, default_value: None },
+        );
+    }
     let signature = store.add_cty(signature);
 
     ctx.registry.add_task(CodeGenTask {
         symbol_name: symbol.clone(),
         body: instance.body.clone(),
         resolver: resolver.as_ref().unwrap().clone(),
         calls: instance.calls.clone(),
         subst,
         signature,
         store,
         unifier_index: instance.unifier_id,
         id,
     });
-        Ok(symbol)
-    } else {
-        unreachable!()
-    }
+    Ok(symbol)
 }
@@ -946,172 +941,172 @@ pub fn gen_comprehension<'ctx, G: CodeGenerator>(
     ctx: &mut CodeGenContext<'ctx, '_>,
     expr: &Expr<Option<Type>>,
 ) -> Result<Option<BasicValueEnum<'ctx>>, String> {
-    if let ExprKind::ListComp { elt, generators } = &expr.node {
-        let current = ctx.builder.get_insert_block().unwrap().get_parent().unwrap();
+    let ExprKind::ListComp { elt, generators } = &expr.node else {
+        unreachable!()
+    };
+
+    let current = ctx.builder.get_insert_block().unwrap().get_parent().unwrap();
 
     let init_bb = ctx.ctx.append_basic_block(current, "listcomp.init");
     let test_bb = ctx.ctx.append_basic_block(current, "listcomp.test");
     let body_bb = ctx.ctx.append_basic_block(current, "listcomp.body");
     let cont_bb = ctx.ctx.append_basic_block(current, "listcomp.cont");
 
     ctx.builder.build_unconditional_branch(init_bb);
 
     ctx.builder.position_at_end(init_bb);
 
     let Comprehension { target, iter, ifs, .. } = &generators[0];
     let iter_val = if let Some(v) = generator.gen_expr(ctx, iter)? {
         v.to_basic_value_enum(ctx, generator, iter.custom.unwrap())?
     } else {
         for bb in [test_bb, body_bb, cont_bb] {
             ctx.builder.position_at_end(bb);
             ctx.builder.build_unreachable();
        }
 
         return Ok(None)
     };
     ...
     // Emits the content of `cont_bb`
     let emit_cont_bb = |ctx: &CodeGenContext| {
         ctx.builder.position_at_end(cont_bb);
         let len_ptr = unsafe {
             ctx.builder.build_gep(list, &[zero_size_t, int32.const_int(1, false)], "length")
         };
         ctx.builder.build_store(len_ptr, ctx.builder.build_load(index, "index"));
     };
 
     for cond in ifs {
         let result = if let Some(v) = generator.gen_expr(ctx, cond)? {
             v.to_basic_value_enum(ctx, generator, cond.custom.unwrap())?.into_int_value()
         } else {
|
} else {
|
||||||
// Bail if the predicate is an ellipsis - Emit cont_bb contents in case the
|
// Bail if the predicate is an ellipsis - Emit cont_bb contents in case the
|
||||||
// no element matches the predicate
|
// no element matches the predicate
|
||||||
emit_cont_bb(ctx);
|
|
||||||
|
|
||||||
return Ok(None)
|
|
||||||
};
|
|
||||||
let result = generator.bool_to_i1(ctx, result);
|
|
||||||
let succ = ctx.ctx.append_basic_block(current, "then");
|
|
||||||
ctx.builder.build_conditional_branch(result, succ, test_bb);
|
|
||||||
|
|
||||||
ctx.builder.position_at_end(succ);
|
|
||||||
}
|
|
||||||
|
|
||||||
let Some(elem) = generator.gen_expr(ctx, elt)? else {
|
|
||||||
// Similarly, bail if the generator expression is an ellipsis, but keep cont_bb contents
|
|
||||||
emit_cont_bb(ctx);
|
emit_cont_bb(ctx);
|
||||||
|
|
||||||
return Ok(None)
|
return Ok(None)
|
||||||
};
|
};
|
||||||
let i = ctx.builder.build_load(index, "i").into_int_value();
|
let result = generator.bool_to_i1(ctx, result);
|
||||||
let elem_ptr = unsafe { ctx.builder.build_gep(list_content, &[i], "elem_ptr") };
|
let succ = ctx.ctx.append_basic_block(current, "then");
|
||||||
let val = elem.to_basic_value_enum(ctx, generator, elt.custom.unwrap())?;
|
ctx.builder.build_conditional_branch(result, succ, test_bb);
|
||||||
ctx.builder.build_store(elem_ptr, val);
|
|
||||||
ctx.builder
|
|
||||||
.build_store(index, ctx.builder.build_int_add(i, size_t.const_int(1, false), "inc"));
|
|
||||||
ctx.builder.build_unconditional_branch(test_bb);
|
|
||||||
|
|
||||||
|
ctx.builder.position_at_end(succ);
|
||||||
|
}
|
||||||
|
|
||||||
|
let Some(elem) = generator.gen_expr(ctx, elt)? else {
|
||||||
|
// Similarly, bail if the generator expression is an ellipsis, but keep cont_bb contents
|
||||||
emit_cont_bb(ctx);
|
emit_cont_bb(ctx);
|
||||||
|
|
||||||
Ok(Some(list.into()))
|
return Ok(None)
|
||||||
} else {
|
};
|
||||||
unreachable!()
|
let i = ctx.builder.build_load(index, "i").into_int_value();
|
||||||
}
|
let elem_ptr = unsafe { ctx.builder.build_gep(list_content, &[i], "elem_ptr") };
|
||||||
|
let val = elem.to_basic_value_enum(ctx, generator, elt.custom.unwrap())?;
|
||||||
|
ctx.builder.build_store(elem_ptr, val);
|
||||||
|
ctx.builder
|
||||||
|
.build_store(index, ctx.builder.build_int_add(i, size_t.const_int(1, false), "inc"));
|
||||||
|
ctx.builder.build_unconditional_branch(test_bb);
|
||||||
|
|
||||||
|
emit_cont_bb(ctx);
|
||||||
|
|
||||||
|
Ok(Some(list.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Generates LLVM IR for a [binary operator expression][expr].
|
/// Generates LLVM IR for a [binary operator expression][expr].
|
||||||
|
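Every hunk in this commit applies the same rewrite: an `if let PATTERN = value { body } else { unreachable!() }` block becomes `let PATTERN = value else { unreachable!() };` with the body moved out to the enclosing scope. The following is a minimal, self-contained sketch of that rewrite; the `Kind` enum and the function names are invented for illustration and are not nac3 code.

    // Hypothetical stand-ins, not nac3 types: a tiny enum and a function that
    // only ever receives the `List` variant.
    #[allow(dead_code)]
    enum Kind {
        List(usize),
        Tuple,
    }

    // Before: the useful body is nested inside the `if let` arm.
    fn len_before(k: &Kind) -> usize {
        if let Kind::List(n) = k {
            *n + 1
        } else {
            unreachable!("must be a list")
        }
    }

    // After: `let ... else` diverges on a mismatch and leaves `n` bound for
    // the rest of the function, removing one level of nesting.
    fn len_after(k: &Kind) -> usize {
        let Kind::List(n) = k else {
            unreachable!("must be a list")
        };

        *n + 1
    }

    fn main() {
        assert_eq!(len_before(&Kind::List(4)), len_after(&Kind::List(4)));
    }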
@@ -1170,9 +1165,11 @@ pub fn gen_binop_expr<'ctx, G: CodeGenerator>(
             .unwrap_left();
         Ok(Some(res.into()))
     } else {
-        let (op_name, id) = if let TypeEnum::TObj { fields, obj_id, .. } =
-            ctx.unifier.get_ty_immutable(left.custom.unwrap()).as_ref()
-        {
+        let left_ty_enum = ctx.unifier.get_ty_immutable(left.custom.unwrap());
+        let TypeEnum::TObj { fields, obj_id, .. } = left_ty_enum.as_ref() else {
+            unreachable!("must be tobj")
+        };
+
+        let (op_name, id) = {
             let (binop_name, binop_assign_name) = (
                 binop_name(op).into(),
                 binop_assign_name(op).into()
@@ -1183,34 +1180,33 @@ pub fn gen_binop_expr<'ctx, G: CodeGenerator>(
             } else {
                 (binop_name, *obj_id)
             }
-        } else {
-            unreachable!("must be tobj")
         };
+
         let signature = match ctx.calls.get(&loc.into()) {
             Some(call) => ctx.unifier.get_call_signature(*call).unwrap(),
             None => {
-                if let TypeEnum::TObj { fields, .. } =
-                    ctx.unifier.get_ty_immutable(left.custom.unwrap()).as_ref()
-                {
-                    let fn_ty = fields.get(&op_name).unwrap().0;
-                    if let TypeEnum::TFunc(sig) = ctx.unifier.get_ty_immutable(fn_ty).as_ref() {
-                        sig.clone()
-                    } else {
-                        unreachable!("must be func sig")
-                    }
-                } else {
-                    unreachable!("must be tobj")
-                }
+                let left_enum_ty = ctx.unifier.get_ty_immutable(left.custom.unwrap());
+                let TypeEnum::TObj { fields, .. } = left_enum_ty.as_ref() else {
+                    unreachable!("must be tobj")
+                };
+
+                let fn_ty = fields.get(&op_name).unwrap().0;
+                let fn_ty_enum = ctx.unifier.get_ty_immutable(fn_ty);
+                let TypeEnum::TFunc(sig) = fn_ty_enum.as_ref() else {
+                    unreachable!()
+                };
+
+                sig.clone()
             },
         };
         let fun_id = {
             let defs = ctx.top_level.definitions.read();
             let obj_def = defs.get(id.0).unwrap().read();
-            if let TopLevelDef::Class { methods, .. } = &*obj_def {
-                methods.iter().find(|method| method.0 == op_name).unwrap().2
-            } else {
-                unreachable!()
-            }
+            let TopLevelDef::Class { methods, .. } = &*obj_def else {
+                unreachable!()
+            };
+
+            methods.iter().find(|method| method.0 == op_name).unwrap().2
         };
         generator
             .gen_call(
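Both gen_binop_expr hunks above also hoist the result of `get_ty_immutable` into a named local (`left_ty_enum`, `left_enum_ty`) before matching on `.as_ref()`. A likely reason, stated here as an assumption rather than something recorded in the commit, is that a `let ... else` cannot borrow from a temporary that is dropped at the end of the statement, so the owning value has to be kept alive in its own binding first. A small sketch with invented types:

    use std::rc::Rc;

    // Invented stand-ins for the unifier types used in the diff.
    #[allow(dead_code)]
    enum TypeEnum {
        TObj { obj_id: usize },
        TFunc,
    }

    fn get_ty_immutable() -> Rc<TypeEnum> {
        Rc::new(TypeEnum::TObj { obj_id: 7 })
    }

    fn obj_id() -> usize {
        // Keep the Rc alive in a local first; matching on
        // `get_ty_immutable().as_ref()` directly would borrow from a temporary
        // that is dropped at the end of the `let ... else` statement.
        let ty_enum = get_ty_immutable();
        let TypeEnum::TObj { obj_id } = ty_enum.as_ref() else {
            unreachable!("must be tobj")
        };

        *obj_id
    }

    fn main() {
        assert_eq!(obj_id(), 7);
    }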
@@ -1290,11 +1286,11 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
             }
 
             let ty = if elements.is_empty() {
-                if let TypeEnum::TList { ty } = &*ctx.unifier.get_ty(expr.custom.unwrap()) {
-                    ctx.get_llvm_type(generator, *ty)
-                } else {
-                    unreachable!()
-                }
+                let TypeEnum::TList { ty } = &*ctx.unifier.get_ty(expr.custom.unwrap()) else {
+                    unreachable!()
+                };
+
+                ctx.get_llvm_type(generator, *ty)
             } else {
                 elements[0].get_type()
             };
@@ -1636,11 +1632,11 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
                 ctx.unifier.get_call_signature(*call).unwrap()
             } else {
                 let ty = func.custom.unwrap();
-                if let TypeEnum::TFunc(sign) = &*ctx.unifier.get_ty(ty) {
-                    sign.clone()
-                } else {
-                    unreachable!()
-                }
+                let TypeEnum::TFunc(sign) = &*ctx.unifier.get_ty(ty) else {
+                    unreachable!()
+                };
+
+                sign.clone()
             };
             let func = func.as_ref();
             match &func.node {
@@ -1669,11 +1665,11 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
             let fun_id = {
                 let defs = ctx.top_level.definitions.read();
                 let obj_def = defs.get(id.0).unwrap().read();
-                if let TopLevelDef::Class { methods, .. } = &*obj_def {
-                    methods.iter().find(|method| method.0 == *attr).unwrap().2
-                } else {
-                    unreachable!()
-                }
+                let TopLevelDef::Class { methods, .. } = &*obj_def else {
+                    unreachable!()
+                };
+
+                methods.iter().find(|method| method.0 == *attr).unwrap().2
             };
             // directly generate code for option.unwrap
             // since it needs to return static value to optimize for kernel invariant
@@ -1755,125 +1751,127 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
                 }
             }
             ExprKind::Subscript { value, slice, .. } => {
-                if let TypeEnum::TList { ty } = &*ctx.unifier.get_ty(value.custom.unwrap()) {
+                match &*ctx.unifier.get_ty(value.custom.unwrap()) {
+                    TypeEnum::TList { ty } => {
                         let v = if let Some(v) = generator.gen_expr(ctx, value)? {
                             v.to_basic_value_enum(ctx, generator, value.custom.unwrap())?.into_pointer_value()
                         } else {
                             return Ok(None)
                         };
                         let ty = ctx.get_llvm_type(generator, *ty);
                         let arr_ptr = ctx.build_gep_and_load(v, &[zero, zero], Some("arr.addr"))
                             .into_pointer_value();
                         if let ExprKind::Slice { lower, upper, step } = &slice.node {
                             let one = int32.const_int(1, false);
                             let Some((start, end, step)) =
                                 handle_slice_indices(lower, upper, step, ctx, generator, v)? else {
                                 return Ok(None)
                             };
                             let length = calculate_len_for_slice_range(
                                 generator,
                                 ctx,
                                 start,
                                 ctx.builder
                                     .build_select(
                                         ctx.builder.build_int_compare(
                                             IntPredicate::SLT,
                                             step,
                                             zero,
                                             "is_neg",
                                         ),
                                         ctx.builder.build_int_sub(end, one, "e_min_one"),
                                         ctx.builder.build_int_add(end, one, "e_add_one"),
                                         "final_e",
                                     )
                                     .into_int_value(),
                                 step,
                             );
                             let res_array_ret = allocate_list(generator, ctx, ty, length, Some("ret"));
                             let Some(res_ind) =
                                 handle_slice_indices(&None, &None, &None, ctx, generator, res_array_ret)? else {
                                 return Ok(None)
                             };
                             list_slice_assignment(
                                 generator,
                                 ctx,
                                 ty,
                                 res_array_ret,
                                 res_ind,
                                 v,
                                 (start, end, step),
                             );
                             res_array_ret.into()
                         } else {
                             let len = ctx
                                 .build_gep_and_load(v, &[zero, int32.const_int(1, false)], Some("len"))
                                 .into_int_value();
                             let raw_index = if let Some(v) = generator.gen_expr(ctx, slice)? {
                                 v.to_basic_value_enum(ctx, generator, slice.custom.unwrap())?.into_int_value()
                             } else {
                                 return Ok(None)
                             };
                             let raw_index = ctx.builder.build_int_s_extend(
                                 raw_index,
                                 generator.get_size_type(ctx.ctx),
                                 "sext",
                             );
                             // handle negative index
                             let is_negative = ctx.builder.build_int_compare(
                                 IntPredicate::SLT,
                                 raw_index,
                                 generator.get_size_type(ctx.ctx).const_zero(),
                                 "is_neg",
                             );
                             let adjusted = ctx.builder.build_int_add(raw_index, len, "adjusted");
                             let index = ctx
                                 .builder
                                 .build_select(is_negative, adjusted, raw_index, "index")
                                 .into_int_value();
                             // unsigned less than is enough, because negative index after adjustment is
                             // bigger than the length (for unsigned cmp)
                             let bound_check = ctx.builder.build_int_compare(
                                 IntPredicate::ULT,
                                 index,
                                 len,
                                 "inbound",
                             );
                             ctx.make_assert(
                                 generator,
                                 bound_check,
                                 "0:IndexError",
                                 "index {0} out of bounds 0:{1}",
                                 [Some(raw_index), Some(len), None],
                                 expr.location,
                             );
                             ctx.build_gep_and_load(arr_ptr, &[index], None).into()
                         }
                     }
-                } else if let TypeEnum::TTuple { .. } = &*ctx.unifier.get_ty(value.custom.unwrap()) {
+                    TypeEnum::TTuple { .. } => {
                         let index: u32 =
                             if let ExprKind::Constant { value: Constant::Int(v), .. } = &slice.node {
                                 (*v).try_into().unwrap()
                             } else {
                                 unreachable!("tuple subscript must be const int after type check");
                             };
                         match generator.gen_expr(ctx, value)? {
                             Some(ValueEnum::Dynamic(v)) => {
                                 let v = v.into_struct_value();
                                 ctx.builder.build_extract_value(v, index, "tup_elem").unwrap().into()
                             }
                             Some(ValueEnum::Static(v)) => {
                                 if let Some(v) = v.get_tuple_element(index) {
                                     v
                                 } else {
                                     let tup = v
                                         .to_basic_value_enum(ctx, generator, value.custom.unwrap())?
                                         .into_struct_value();
                                     ctx.builder.build_extract_value(tup, index, "tup_elem").unwrap().into()
                                 }
                             }
                             None => return Ok(None),
                         }
                     }
-                } else {
-                    unreachable!("should not be other subscriptable types after type check");
+                    _ => unreachable!("should not be other subscriptable types after type check"),
                 }
             },
             ExprKind::ListComp { .. } => {
@@ -451,40 +451,38 @@ fn get_llvm_type<'ctx>(
                 // a struct with fields in the order of declaration
                 let top_level_defs = top_level.definitions.read();
                 let definition = top_level_defs.get(obj_id.0).unwrap();
-                let ty = if let TopLevelDef::Class { fields: fields_list, .. } =
-                    &*definition.read()
-                {
+                let TopLevelDef::Class { fields: fields_list, .. } = &*definition.read() else {
+                    unreachable!()
+                };
+
                 let name = unifier.stringify(ty);
-                if let Some(t) = module.get_struct_type(&name) {
+                let ty = if let Some(t) = module.get_struct_type(&name) {
                     t.ptr_type(AddressSpace::default()).into()
                 } else {
                     let struct_type = ctx.opaque_struct_type(&name);
                     type_cache.insert(
                         unifier.get_representative(ty),
                         struct_type.ptr_type(AddressSpace::default()).into()
                     );
                     let fields = fields_list
                         .iter()
                         .map(|f| {
                             get_llvm_type(
                                 ctx,
                                 module,
                                 generator,
                                 unifier,
                                 top_level,
                                 type_cache,
                                 primitives,
                                 fields[&f.0].0,
                             )
                         })
                         .collect_vec();
                     struct_type.set_body(&fields, false);
                     struct_type.ptr_type(AddressSpace::default()).into()
-                }
-                } else {
-                    unreachable!()
                 };
-                return ty;
+
+                return ty
             }
             TTuple { ty } => {
                 // a struct with fields in the order present in the tuple
@@ -661,22 +659,21 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
     // NOTE: special handling of option cannot use this type cache since it contains type var,
     // handled inside get_llvm_type instead
 
-    let (args, ret) = if let ConcreteTypeEnum::TFunc { args, ret, .. } =
-        task.store.get(task.signature)
-    {
-        (
-            args.iter()
-                .map(|arg| FuncArg {
-                    name: arg.name,
-                    ty: task.store.to_unifier_type(&mut unifier, &primitives, arg.ty, &mut cache),
-                    default_value: arg.default_value.clone(),
-                })
-                .collect_vec(),
-            task.store.to_unifier_type(&mut unifier, &primitives, *ret, &mut cache),
-        )
-    } else {
-        unreachable!()
-    };
+    let ConcreteTypeEnum::TFunc { args, ret, .. } =
+        task.store.get(task.signature) else {
+        unreachable!()
+    };
+
+    let (args, ret) = (
+        args.iter()
+            .map(|arg| FuncArg {
+                name: arg.name,
+                ty: task.store.to_unifier_type(&mut unifier, &primitives, arg.ty, &mut cache),
+                default_value: arg.default_value.clone(),
+            })
+            .collect_vec(),
+        task.store.to_unifier_type(&mut unifier, &primitives, *ret, &mut cache),
+    );
     let ret_type = if unifier.unioned(ret, primitives.none) {
         None
     } else {
File diff suppressed because it is too large
@@ -528,11 +528,11 @@ impl dyn SymbolResolver + Send + Sync {
         unifier.internal_stringify(
             ty,
             &mut |id| {
-                if let TopLevelDef::Class { name, .. } = &*top_level_defs[id].read() {
-                    name.to_string()
-                } else {
-                    unreachable!("expected class definition")
-                }
+                let TopLevelDef::Class { name, .. } = &*top_level_defs[id].read() else {
+                    unreachable!("expected class definition")
+                };
+
+                name.to_string()
             },
             &mut |id| format!("typevar{id}"),
             &mut None,
@@ -421,11 +421,11 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                     generator,
                     expect_ty,
                 )?;
-                if let BasicValueEnum::PointerValue(ptr) = obj_val {
-                    Ok(Some(ctx.builder.build_is_not_null(ptr, "is_some").into()))
-                } else {
-                    unreachable!("option must be ptr")
-                }
+                let BasicValueEnum::PointerValue(ptr) = obj_val else {
+                    unreachable!("option must be ptr")
+                };
+
+                Ok(Some(ctx.builder.build_is_not_null(ptr, "is_some").into()))
             },
         )))),
         loc: None,
@@ -446,11 +446,11 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                     generator,
                     expect_ty,
                 )?;
-                if let BasicValueEnum::PointerValue(ptr) = obj_val {
-                    Ok(Some(ctx.builder.build_is_null(ptr, "is_none").into()))
-                } else {
-                    unreachable!("option must be ptr")
-                }
+                let BasicValueEnum::PointerValue(ptr) = obj_val else {
+                    unreachable!("option must be ptr")
+                };
+
+                Ok(Some(ctx.builder.build_is_null(ptr, "is_none").into()))
             },
         )))),
         loc: None,
@@ -686,7 +686,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
 
                 val
             } else {
-                unreachable!();
+                unreachable!()
             };
             Ok(Some(res))
         },
@@ -762,7 +762,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
 
                 val
             } else {
-                unreachable!();
+                unreachable!()
             };
             Ok(Some(res))
         },
@@ -1361,7 +1361,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
             } else if is_type(m_ty, n_ty) && is_type(n_ty, float) {
                 ("llvm.minnum.f64", llvm_f64)
             } else {
-                unreachable!();
+                unreachable!()
             };
             let intrinsic = ctx.module.get_function(fun_name).unwrap_or_else(|| {
                 let fn_type = arg_ty.fn_type(&[arg_ty.into(), arg_ty.into()], false);
@@ -1423,7 +1423,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
             } else if is_type(m_ty, n_ty) && is_type(n_ty, float) {
                 ("llvm.maxnum.f64", llvm_f64)
             } else {
-                unreachable!();
+                unreachable!()
            };
             let intrinsic = ctx.module.get_function(fun_name).unwrap_or_else(|| {
                 let fn_type = arg_ty.fn_type(&[arg_ty.into(), arg_ty.into()], false);
@@ -1480,7 +1480,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
                 is_float = true;
                 ("llvm.fabs.f64", llvm_f64)
             } else {
-                unreachable!();
+                unreachable!()
             };
             let intrinsic = ctx.module.get_function(fun_name).unwrap_or_else(|| {
                 let fn_type = if is_float {
 
File diff suppressed because it is too large
@@ -233,11 +233,11 @@ impl TopLevelComposer {
             };
             // check cycle
             let no_cycle = result.iter().all(|x| {
-                if let TypeAnnotation::CustomClass { id, .. } = x {
-                    id.0 != p_id.0
-                } else {
-                    unreachable!("must be class kind annotation")
-                }
+                let TypeAnnotation::CustomClass { id, .. } = x else {
+                    unreachable!("must be class kind annotation")
+                };
+
+                id.0 != p_id.0
             });
             if no_cycle {
                 result.push(p);
@@ -260,14 +260,14 @@ impl TopLevelComposer {
                 };
                 let child_def = temp_def_list.get(child_id.0).unwrap();
                 let child_def = child_def.read();
-                if let TopLevelDef::Class { ancestors, .. } = &*child_def {
-                    if ancestors.is_empty() {
-                        None
-                    } else {
-                        Some(ancestors[0].clone())
-                    }
-                } else {
-                    unreachable!("child must be top level class def")
-                }
+                let TopLevelDef::Class { ancestors, .. } = &*child_def else {
+                    unreachable!("child must be top level class def")
+                };
+
+                if ancestors.is_empty() {
+                    None
+                } else {
+                    Some(ancestors[0].clone())
+                }
             }
         }
@@ -292,39 +292,38 @@ impl TopLevelComposer {
         let this = this.as_ref();
         let other = unifier.get_ty(other);
         let other = other.as_ref();
-        if let (
+        let (
             TypeEnum::TFunc(FunSignature { args: this_args, ret: this_ret, .. }),
             TypeEnum::TFunc(FunSignature { args: other_args, ret: other_ret, .. }),
-        ) = (this, other)
-        {
+        ) = (this, other) else {
+            unreachable!("this function must be called with function type")
+        };
+
         // check args
         let args_ok = this_args
             .iter()
             .map(|FuncArg { name, ty, .. }| (name, type_var_to_concrete_def.get(ty).unwrap()))
             .zip(other_args.iter().map(|FuncArg { name, ty, .. }| {
                 (name, type_var_to_concrete_def.get(ty).unwrap())
             }))
             .all(|(this, other)| {
                 if this.0 == &"self".into() && this.0 == other.0 {
                     true
                 } else {
                     this.0 == other.0
                         && check_overload_type_annotation_compatible(this.1, other.1, unifier)
                 }
             });
 
         // check rets
         let ret_ok = check_overload_type_annotation_compatible(
             type_var_to_concrete_def.get(this_ret).unwrap(),
             type_var_to_concrete_def.get(other_ret).unwrap(),
             unifier,
         );
 
         // return
         args_ok && ret_ok
-        } else {
-            unreachable!("this function must be called with function type")
-        }
     }
 
     pub fn check_overload_field_type(
@@ -163,11 +163,11 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
             let type_vars = {
                 let def_read = top_level_defs[obj_id.0].try_read();
                 if let Some(def_read) = def_read {
-                    if let TopLevelDef::Class { type_vars, .. } = &*def_read {
-                        type_vars.clone()
-                    } else {
-                        unreachable!("must be class here")
-                    }
+                    let TopLevelDef::Class { type_vars, .. } = &*def_read else {
+                        unreachable!("must be class here")
+                    };
+
+                    type_vars.clone()
                 } else {
                     locked.get(&obj_id).unwrap().clone()
                 }
@@ -497,13 +497,11 @@ pub fn get_type_from_type_annotation_kinds(
         TypeAnnotation::Primitive(ty) | TypeAnnotation::TypeVar(ty) => Ok(*ty),
         TypeAnnotation::Constant { ty, value, .. } => {
             let ty_enum = unifier.get_ty(*ty);
-            let (ty, loc) = match &*ty_enum {
-                TypeEnum::TVar { range: ntv_underlying_ty, loc, is_const_generic: true, .. } => {
-                    (ntv_underlying_ty[0], loc)
-                }
-                _ => unreachable!("{} ({})", unifier.stringify(*ty), ty_enum.get_type_name()),
-            };
+            let TypeEnum::TVar { range: ntv_underlying_ty, loc, is_const_generic: true, .. } = &*ty_enum else {
+                unreachable!("{} ({})", unifier.stringify(*ty), ty_enum.get_type_name());
+            };
+
+            let ty = ntv_underlying_ty[0];
             let var = unifier.get_fresh_constant(value.clone(), ty, *loc);
             Ok(var)
         }
@@ -596,15 +594,14 @@ pub fn check_overload_type_annotation_compatible(
             let a = &*a;
             let b = unifier.get_ty(*b);
             let b = &*b;
-            if let (
+            let (
                 TypeEnum::TVar { id: a, fields: None, .. },
                 TypeEnum::TVar { id: b, fields: None, .. },
-            ) = (a, b)
-            {
-                a == b
-            } else {
-                unreachable!("must be type var")
-            }
+            ) = (a, b) else {
+                unreachable!("must be type var")
+            };
+
+            a == b
         }
         (TypeAnnotation::Virtual(a), TypeAnnotation::Virtual(b))
         | (TypeAnnotation::List(a), TypeAnnotation::List(b)) => {
@@ -241,35 +241,35 @@ impl<'a> Fold<()> for Inferencer<'a> {
                 let targets: Result<Vec<_>, _> = targets
                     .into_iter()
                     .map(|target| {
-                        if let ExprKind::Name { id, ctx } = target.node {
+                        let ExprKind::Name { id, ctx } = target.node else {
+                            unreachable!()
+                        };
+
                         self.defined_identifiers.insert(id);
                         let target_ty = if let Some(ty) = self.variable_mapping.get(&id)
                         {
                             *ty
                         } else {
                             let unifier: &mut Unifier = self.unifier;
                             self.function_data
                                 .resolver
                                 .get_symbol_type(
                                     unifier,
                                     &self.top_level.definitions.read(),
                                     self.primitives,
                                     id,
                                 )
                                 .unwrap_or_else(|_| {
                                     self.variable_mapping.insert(id, value_ty);
                                     value_ty
                                 })
                         };
                         let location = target.location;
                         self.unifier.unify(value_ty, target_ty).map(|()| Located {
                             location,
                             node: ExprKind::Name { id, ctx },
                             custom: Some(target_ty),
                         })
-                        } else {
-                            unreachable!()
-                        }
                     })
                     .collect();
                 let loc = node.location;
@@ -465,12 +465,12 @@ impl<'a> Fold<()> for Inferencer<'a> {
                     let var_map = params
                         .iter()
                         .map(|(id_var, ty)| {
-                            if let TypeEnum::TVar { id, range, name, loc, .. } = &*self.unifier.get_ty(*ty) {
-                                assert_eq!(*id, *id_var);
-                                (*id, self.unifier.get_fresh_var_with_range(range, *name, *loc).0)
-                            } else {
-                                unreachable!()
-                            }
+                            let TypeEnum::TVar { id, range, name, loc, .. } = &*self.unifier.get_ty(*ty) else {
+                                unreachable!()
+                            };
+
+                            assert_eq!(*id, *id_var);
+                            (*id, self.unifier.get_fresh_var_with_range(range, *name, *loc).0)
                         })
                         .collect::<HashMap<_, _>>();
                     Some(self.unifier.subst(self.primitives.option, &var_map).unwrap())
@@ -499,12 +499,9 @@ impl Unifier {
         let instantiated = self.instantiate_fun(b, signature);
         let r = self.get_ty(instantiated);
         let r = r.as_ref();
-        let signature;
-        if let TypeEnum::TFunc(s) = r {
-            signature = s;
-        } else {
-            unreachable!();
-        }
+        let TypeEnum::TFunc(signature) = r else {
+            unreachable!()
+        };
         // we check to make sure that all required arguments (those without default
         // arguments) are provided, and do not provide the same argument twice.
         let mut required = required.to_vec();
@@ -940,13 +937,12 @@ impl Unifier {
                 top_level.as_ref().map_or_else(
                     || format!("{id}"),
                     |top_level| {
-                        if let TopLevelDef::Class { name, .. } =
-                            &*top_level.definitions.read()[id].read()
-                        {
-                            name.to_string()
-                        } else {
-                            unreachable!("expected class definition")
-                        }
+                        let top_level_def = &top_level.definitions.read()[id];
+                        let TopLevelDef::Class { name, .. } = &*top_level_def.read() else {
+                            unreachable!("expected class definition")
+                        };
+
+                        name.to_string()
                     },
                 )
             },
@@ -339,23 +339,21 @@ fn test_recursive_subst() {
     let int = *env.type_mapping.get("int").unwrap();
     let foo_id = *env.type_mapping.get("Foo").unwrap();
     let foo_ty = env.unifier.get_ty(foo_id);
-    let mapping: HashMap<_, _>;
 
     with_fields(&mut env.unifier, foo_id, |_unifier, fields| {
         fields.insert("rec".into(), (foo_id, true));
     });
-    if let TypeEnum::TObj { params, .. } = &*foo_ty {
-        mapping = params.iter().map(|(id, _)| (*id, int)).collect();
-    } else {
+    let TypeEnum::TObj { params, .. } = &*foo_ty else {
         unreachable!()
-    }
+    };
+
+    let mapping = params.iter().map(|(id, _)| (*id, int)).collect();
     let instantiated = env.unifier.subst(foo_id, &mapping).unwrap();
     let instantiated_ty = env.unifier.get_ty(instantiated);
-    if let TypeEnum::TObj { fields, .. } = &*instantiated_ty {
-        assert!(env.unifier.unioned(fields.get(&"a".into()).unwrap().0, int));
-        assert!(env.unifier.unioned(fields.get(&"rec".into()).unwrap().0, instantiated));
-    } else {
+    let TypeEnum::TObj { fields, .. } = &*instantiated_ty else {
         unreachable!()
-    }
+    };
+
+    assert!(env.unifier.unioned(fields.get(&"a".into()).unwrap().0, int));
+    assert!(env.unifier.unioned(fields.get(&"rec".into()).unwrap().0, instantiated));
 }
 
 #[test]
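In the test hunk above, the refactor also removes the deferred `let mapping: HashMap<_, _>;` declaration: because `let ... else` leaves `params` in scope, `mapping` can be initialized where it is declared. A compact sketch of the two shapes, using invented stand-in types rather than the real unifier types:

    use std::collections::HashMap;

    // Invented stand-in for the unifier's TypeEnum in the test above.
    #[allow(dead_code)]
    enum TypeEnum {
        TObj { params: Vec<u32> },
        TVirtual,
    }

    fn main() {
        let foo_ty = TypeEnum::TObj { params: vec![1, 2, 3] };
        let int = 42u32;

        // Pre-refactor shape: declare first, assign inside the `if let` arm;
        // the diverging else branch keeps the definite-initialization check happy.
        let mapping_old: HashMap<u32, u32>;
        if let TypeEnum::TObj { params } = &foo_ty {
            mapping_old = params.iter().map(|id| (*id, int)).collect();
        } else {
            unreachable!()
        }

        // Post-refactor shape: `let ... else` binds `params`, so `mapping` is
        // initialized directly at its declaration.
        let TypeEnum::TObj { params } = &foo_ty else {
            unreachable!()
        };
        let mapping: HashMap<u32, u32> = params.iter().map(|id| (*id, int)).collect();

        assert_eq!(mapping_old, mapping);
    }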
@@ -363,12 +363,11 @@ fn main() {
             .unwrap_or_else(|_| panic!("cannot find run() entry point"))
             .0]
             .write();
-        if let TopLevelDef::Function { instance_to_stmt, instance_to_symbol, .. } = &mut *instance {
-            instance_to_symbol.insert(String::new(), "run".to_string());
-            instance_to_stmt[""].clone()
-        } else {
-            unreachable!()
-        }
+        let TopLevelDef::Function { instance_to_stmt, instance_to_symbol, .. } = &mut *instance else {
+            unreachable!()
+        };
+        instance_to_symbol.insert(String::new(), "run".to_string());
+        instance_to_stmt[""].clone()
     };
 
     let llvm_options = CodeGenLLVMOptions {