TypeEnum::TObj.param is now RefCell for interior mutability

parent 824a5cb01a
commit 3a93e2b048
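The point of the commit: `TObj`'s type-parameter map gains interior mutability, so call sites that only hold a shared reference to the `TypeEnum` can still read or update `params` through a `RefCell`. A minimal sketch of the pattern, separate from the commit itself (`VarMap` is approximated here by a `HashMap<u32, u32>`; the real map stores `Type` values):

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// Stand-in for the crate's VarMap (type-variable id -> Type).
type VarMap = HashMap<u32, u32>;

struct TObj {
    params: RefCell<VarMap>, // interior mutability: mutate through a shared &TObj
}

fn main() {
    let obj = TObj { params: RefCell::new(HashMap::new()) };
    let shared: &TObj = &obj; // only a shared reference is available...
    shared.params.borrow_mut().insert(0, 42); // ...yet the map can still be updated
    assert_eq!(shared.params.borrow().get(&0), Some(&42));
}
```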
@@ -19,7 +19,7 @@ impl<'ctx> CodeGenContext<'ctx> {
         let mut vars = obj
             .map(|ty| {
                 if let TypeEnum::TObj { params, .. } = &*self.unifier.get_ty(ty) {
-                    params.clone()
+                    params.borrow().clone()
                 } else {
                     unreachable!()
                 }
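The call site now goes through the `RefCell` guard: `borrow()` yields a `Ref`, and `.clone()` auto-derefs to the map itself, producing an owned copy so the shared borrow ends immediately. A small illustration (not project code):

```rust
use std::cell::RefCell;
use std::collections::HashMap;

fn main() {
    let params: RefCell<HashMap<u32, u32>> = RefCell::new(HashMap::from([(1, 10)]));
    // `borrow()` returns a `Ref` guard; `.clone()` clones the underlying HashMap,
    // so `owned` is independent of the RefCell and the borrow ends right here.
    let owned: HashMap<u32, u32> = params.borrow().clone();
    assert_eq!(owned.get(&1), Some(&10));
}
```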
|
@@ -143,8 +143,7 @@ impl dyn SymbolResolver {
                 let ty = unifier.subst(*ty, &subst).unwrap_or(*ty);
                 (attr.clone(), ty)
             }));
-            let fields = RefCell::new(fields);
-            Ok(unifier.add_ty(TypeEnum::TObj { obj_id, fields, params: subst }))
+            Ok(unifier.add_ty(TypeEnum::TObj { obj_id, fields: fields.into(), params: subst.into() }))
         } else {
             Err("Cannot use function name as type".into())
         }
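The explicit `RefCell::new(fields)` binding can disappear because the standard library provides `impl<T> From<T> for RefCell<T>`, so a plain map converts with `.into()` right at the struct literal. Illustrative sketch (field types are stand-ins, not the crate's):

```rust
use std::cell::RefCell;
use std::collections::HashMap;

struct Obj {
    fields: RefCell<HashMap<String, u32>>, // u32 stands in for the real Type
}

fn main() {
    let fields: HashMap<String, u32> = HashMap::from([("attr".to_string(), 0)]);
    // `From<T> for RefCell<T>` makes `fields.into()` equivalent to `RefCell::new(fields)`.
    let obj = Obj { fields: fields.into() };
    assert!(obj.fields.borrow().contains_key("attr"));
}
```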
|
@@ -57,14 +57,8 @@ pub struct TopLevelContext {
     pub conetexts: Arc<RwLock<Vec<Mutex<Context>>>>,
 }

-pub fn name_mangling(mut class_name: String, method_name: &str) -> String {
-    // need to further extend to more name mangling like instantiations of typevar
-    class_name.push_str(method_name);
-    class_name
-}
-
-// like adding some info on top of the TopLevelDef for later parsing the class bases, method,
-// and function sigatures
+// like adding some info on top of the TopLevelDef for
+// later parsing the class bases, method, and function sigatures
 pub struct TopLevelDefInfo {
     // the definition entry
     def: TopLevelDef,
@@ -82,37 +76,42 @@ pub struct TopLevelComposer {
     pub primitives: PrimitiveStore,
     // start as a primitive unifier, will add more top_level defs inside
     pub unifier: Unifier,
-    // class method to definition id
+    // mangled class method name to def_id
     pub class_method_to_def_id: HashMap<String, DefinitionId>,
 }

 impl TopLevelComposer {
+    fn name_mangling(mut class_name: String, method_name: &str) -> String {
+        class_name.push_str(method_name);
+        class_name
+    }
+
     pub fn make_primitives() -> (PrimitiveStore, Unifier) {
         let mut unifier = Unifier::new();
         let int32 = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(0),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let int64 = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(1),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let float = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(2),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let bool = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(3),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let none = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(4),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let primitives = PrimitiveStore { int32, int64, float, bool, none };
         crate::typecheck::magic_methods::set_primitives_magic_methods(&primitives, &mut unifier);
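`name_mangling` becomes an associated function of `TopLevelComposer`; it simply appends the method name to the class name, and the result is used as the key in `class_method_to_def_id`. A standalone sketch with hypothetical names:

```rust
// Same shape as the helper in the diff, exercised with made-up names.
fn name_mangling(mut class_name: String, method_name: &str) -> String {
    class_name.push_str(method_name);
    class_name
}

fn main() {
    // e.g. a class `Foo` with method `__init__` would be keyed as "Foo__init__"
    assert_eq!(name_mangling("Foo".to_string(), "__init__"), "Foo__init__");
}
```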
@@ -213,13 +212,21 @@ impl TopLevelComposer {
                     params: Default::default(),
                 });

+                // add the class to the definition list
+                def_list.push(TopLevelDefInfo {
+                    def: Self::make_top_level_class_def(class_def_id, resolver.clone()),
+                    // NOTE: Temporarily none here since function body need to be read later
+                    ast: None,
+                    ty,
+                });
+
                 // parse class def body and register class methods into the def list
                 // module's symbol resolver would not know the name of the class methods,
                 // thus cannot return their definition_id? so we have to manage it ourselves
                 // by using the field `class_method_to_def_id`
                 for b in body {
                     if let ast::StmtKind::FunctionDef { name, .. } = &b.node {
-                        let fun_name = name_mangling(class_name.clone(), name);
+                        let fun_name = Self::name_mangling(class_name.clone(), name);
                         let def_id = def_list.len();

                         // add to unifier
@@ -240,19 +247,14 @@ impl TopLevelComposer {
                         });

                         // class method, do not let the symbol manager manage it, use our own map
-                        // FIXME: maybe not do name magling, use map to map instead
-                        self.class_method_to_def_id.insert(
-                            fun_name,
-                            DefinitionId(def_id)
-                        );
+                        self.class_method_to_def_id.insert(fun_name, DefinitionId(def_id));

                         // if it is the contructor, special handling is needed. In the above
                         // handling, we still add __init__ function to the class method
                         if name == "__init__" {
+                            // FIXME: how can this later be fetched?
                             def_list.push(TopLevelDefInfo {
-                                def: TopLevelDef::Initializer {
-                                    class_id: DefinitionId(class_def_id),
-                                },
+                                def: TopLevelDef::Initializer { class_id: DefinitionId(class_def_id) },
                                 // arbitary picked one for the constructor
                                 ty: self.primitives.none,
                                 // it is inside the class def body statments, so None
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// add the class to the definition list
|
// move the ast to the entry of the class in the def_list
|
||||||
def_list.push(TopLevelDefInfo {
|
def_list.get_mut(class_def_id).unwrap().ast = Some(ast);
|
||||||
def: Self::make_top_level_class_def(class_def_id, resolver),
|
|
||||||
ast: Some(ast),
|
|
||||||
ty,
|
|
||||||
});
|
|
||||||
|
|
||||||
|
// return
|
||||||
Ok((class_name, DefinitionId(class_def_id), ty))
|
Ok((class_name, DefinitionId(class_def_id), ty))
|
||||||
},
|
},
|
||||||
|
|
||||||
@@ -313,28 +312,28 @@ impl TopLevelComposer {
                 ..
             } => {
                 // get the mutable reference of the entry in the definition list, get the `TopLevelDef`
-                let (_,
+                let (
                     ancestors,
                     fields,
                     methods,
                     type_vars,
                     resolver,
                 ) = if let TopLevelDef::Class {
-                    object_id,
+                    object_id: _,
                     ancestors,
                     fields,
                     methods,
                     type_vars,
                     resolver: Some(resolver)
                 } = &mut d.def {
-                    (object_id, ancestors, fields, methods, type_vars, resolver.lock())
+                    (ancestors, fields, methods, type_vars, resolver.lock())
                 } else { unreachable!() };

                 // try to get mutable reference of the entry in the unification table, get the `TypeEnum`
                 let (params,
                     fields
                 ) = if let TypeEnum::TObj {
-                    // FIXME: this params is immutable, even if this is mutable, what
+                    // FIXME: this params is immutable, and what
                     // should the key be, get the original typevar's var_id?
                     params,
                     fields,
@@ -51,27 +51,27 @@ impl TestEnvironment {
         let int32 = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(0),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let int64 = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(1),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let float = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(2),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let bool = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(3),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let none = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(4),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let primitives = PrimitiveStore { int32, int64, float, bool, none };
         set_primitives_magic_methods(&primitives, &mut unifier);
@@ -123,27 +123,27 @@ impl TestEnvironment {
         let int32 = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(0),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let int64 = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(1),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let float = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(2),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let bool = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(3),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         let none = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(4),
             fields: HashMap::new().into(),
-            params: HashMap::new(),
+            params: HashMap::new().into(),
         });
         identifier_mapping.insert("None".into(), none);
         for i in 0..5 {
@@ -164,7 +164,7 @@ impl TestEnvironment {
         let foo_ty = unifier.add_ty(TypeEnum::TObj {
             obj_id: DefinitionId(5),
             fields: [("a".into(), v0)].iter().cloned().collect::<HashMap<_, _>>().into(),
-            params: [(id, v0)].iter().cloned().collect(),
+            params: [(id, v0)].iter().cloned().collect::<HashMap<_, _>>().into(),
         });
         top_level_defs.push(RwLock::new(TopLevelDef::Class {
             object_id: DefinitionId(5),
@@ -68,7 +68,7 @@ pub enum TypeEnum {
     TObj {
         obj_id: DefinitionId,
         fields: RefCell<Mapping<String>>,
-        params: VarMap,
+        params: RefCell<VarMap>,
     },
     TVirtual {
         ty: Type,
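With both `fields` and `params` behind `RefCell`, a `TObj` can be built from plain `HashMap`s via `.into()` and later mutated through a shared reference, which is what the updated call sites throughout this commit rely on. A self-contained sketch with stand-in types (the real code uses `DefinitionId`, `Mapping<String>`, `VarMap`, and `Type` from the crate):

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// Stand-ins so the sketch compiles on its own.
type Type = u32;
type Mapping<K> = HashMap<K, Type>;
type VarMap = HashMap<u32, Type>;

enum TypeEnum {
    TObj {
        obj_id: usize,
        fields: RefCell<Mapping<String>>,
        params: RefCell<VarMap>,
    },
}

fn main() {
    // Mirrors the `HashMap::new().into()` pattern used at the updated call sites.
    let t = TypeEnum::TObj {
        obj_id: 0,
        fields: HashMap::new().into(),
        params: HashMap::new().into(),
    };
    if let TypeEnum::TObj { params, .. } = &t {
        // Interior mutability: the map is updated through a shared `&TypeEnum`.
        params.borrow_mut().insert(1, 2);
        assert_eq!(params.borrow().len(), 1);
    }
}
```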
@@ -216,6 +216,7 @@ impl Unifier {
                 }
             }
             TypeEnum::TObj { params, .. } => {
+                let params = params.borrow();
                 let (keys, params): (Vec<&u32>, Vec<&Type>) = params.iter().unzip();
                 let params = params
                     .into_iter()
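Note why the borrow is bound to a local before the iteration: `params.borrow()` produces a temporary `Ref` guard, and references collected out of a temporary cannot outlive the statement. Binding the guard (shadowing `params`) keeps it alive for the rest of the block. A sketch of the pattern outside the project:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

fn main() {
    let params: RefCell<HashMap<u32, u32>> = RefCell::new(HashMap::from([(7, 3)]));

    // Bind the guard first: collecting references straight out of
    // `params.borrow().iter()` would borrow from a temporary `Ref` that is
    // dropped at the end of that statement, so the vectors below could not
    // be used afterwards.
    let params = params.borrow();
    let (keys, values): (Vec<&u32>, Vec<&u32>) = params.iter().unzip();
    assert_eq!((keys[0], values[0]), (&7, &3));
}
```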
@@ -253,7 +254,7 @@ impl Unifier {
             TList { ty } => self.is_concrete(*ty, allowed_typevars),
             TTuple { ty } => ty.iter().all(|ty| self.is_concrete(*ty, allowed_typevars)),
             TObj { params: vars, .. } => {
-                vars.values().all(|ty| self.is_concrete(*ty, allowed_typevars))
+                vars.borrow().values().all(|ty| self.is_concrete(*ty, allowed_typevars))
             }
             // functions are instantiated for each call sites, so the function type can contain
             // type variables.
@@ -437,7 +438,7 @@ impl Unifier {
                 if id1 != id2 {
                     return Err(format!("Cannot unify objects with ID {} and {}", id1.0, id2.0));
                 }
-                for (x, y) in zip(params1.values(), params2.values()) {
+                for (x, y) in zip(params1.borrow().values(), params2.borrow().values()) {
                     self.unify(*x, *y)?;
                 }
                 self.set_a_to_b(a, b);
@@ -573,6 +574,7 @@ impl Unifier {
             }
             TypeEnum::TObj { obj_id, params, .. } => {
                 let name = obj_to_name(obj_id.0);
+                let params = params.borrow();
                 if !params.is_empty() {
                     let mut params =
                         params.values().map(|v| self.stringify(*v, obj_to_name, var_to_name));
@@ -679,6 +681,7 @@ impl Unifier {
                 // If the mapping does not contain any type variables in the
                 // parameter list, we don't need to substitute the fields.
                 // This is also used to prevent infinite substitution...
+                let params = params.borrow();
                 let need_subst = params.values().any(|v| {
                     let ty = self.unification_table.probe_value(*v);
                     if let TypeEnum::TVar { id, .. } = ty.as_ref() {
@@ -693,7 +696,7 @@ impl Unifier {
                     let fields = self
                         .subst_map(&fields.borrow(), mapping)
                         .unwrap_or_else(|| fields.borrow().clone());
-                    Some(self.add_ty(TypeEnum::TObj { obj_id, params, fields: fields.into() }))
+                    Some(self.add_ty(TypeEnum::TObj { obj_id, params: params.into(), fields: fields.into() }))
                 } else {
                     None
                 }
|
|||||||
self.occur_check(a, *ty)?;
|
self.occur_check(a, *ty)?;
|
||||||
}
|
}
|
||||||
TypeEnum::TObj { params: map, .. } => {
|
TypeEnum::TObj { params: map, .. } => {
|
||||||
for t in map.values() {
|
for t in map.borrow().values() {
|
||||||
self.occur_check(a, *t)?;
|
self.occur_check(a, *t)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -43,7 +43,7 @@ impl Unifier {
             (
                 TypeEnum::TObj { obj_id: id1, params: params1, .. },
                 TypeEnum::TObj { obj_id: id2, params: params2, .. },
-            ) => id1 == id2 && self.map_eq(params1, params2),
+            ) => id1 == id2 && self.map_eq(&params1.borrow(), &params2.borrow()),
             // TCall and TFunc are not yet implemented
             _ => false,
         }
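`&params1.borrow()` works here because `Ref<T>` dereferences to `T`, so a reference to the guard coerces to a reference to the map at the call boundary. A hedged sketch with a hypothetical `map_eq` stand-in (the real one lives on the unifier and compares the mapped types structurally):

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// Hypothetical stand-in for the unifier's `map_eq`; here it only compares key sets.
fn map_eq(a: &HashMap<u32, u32>, b: &HashMap<u32, u32>) -> bool {
    a.len() == b.len() && a.keys().all(|k| b.contains_key(k))
}

fn main() {
    let params1: RefCell<HashMap<u32, u32>> = RefCell::new(HashMap::from([(1, 10)]));
    let params2: RefCell<HashMap<u32, u32>> = RefCell::new(HashMap::from([(1, 20)]));
    // `Ref<HashMap<_, _>>` derefs to the map, so `&p.borrow()` coerces to `&HashMap<_, _>`.
    assert!(map_eq(&params1.borrow(), &params2.borrow()));
}
```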
@@ -80,7 +80,7 @@ impl TestEnvironment {
             unifier.add_ty(TypeEnum::TObj {
                 obj_id: DefinitionId(0),
                 fields: HashMap::new().into(),
-                params: HashMap::new(),
+                params: HashMap::new().into(),
             }),
         );
         type_mapping.insert(
@@ -88,7 +88,7 @@ impl TestEnvironment {
             unifier.add_ty(TypeEnum::TObj {
                 obj_id: DefinitionId(1),
                 fields: HashMap::new().into(),
-                params: HashMap::new(),
+                params: HashMap::new().into(),
             }),
         );
         type_mapping.insert(
@@ -96,7 +96,7 @@ impl TestEnvironment {
             unifier.add_ty(TypeEnum::TObj {
                 obj_id: DefinitionId(2),
                 fields: HashMap::new().into(),
-                params: HashMap::new(),
+                params: HashMap::new().into(),
             }),
         );
         let (v0, id) = unifier.get_fresh_var();
@@ -105,7 +105,7 @@ impl TestEnvironment {
             unifier.add_ty(TypeEnum::TObj {
                 obj_id: DefinitionId(3),
                 fields: [("a".into(), v0)].iter().cloned().collect::<HashMap<_, _>>().into(),
-                params: [(id, v0)].iter().cloned().collect(),
+                params: [(id, v0)].iter().cloned().collect::<HashMap<_, _>>().into(),
             }),
         );

@@ -164,6 +164,7 @@ impl TestEnvironment {
         let mut ty = *self.type_mapping.get(x).unwrap();
         let te = self.unifier.get_ty(ty);
         if let TypeEnum::TObj { params, .. } = &*te.as_ref() {
+            let params = params.borrow();
             if !params.is_empty() {
                 assert!(&s[0..1] == "[");
                 let mut p = Vec::new();
@@ -340,7 +341,7 @@ fn test_virtual() {
             .cloned()
             .collect::<HashMap<_, _>>()
             .into(),
-        params: HashMap::new(),
+        params: HashMap::new().into(),
     });
     let v0 = env.unifier.get_fresh_var().0;
     let v1 = env.unifier.get_fresh_var().0;