change the parse type annotation parameter type, refactoring top level
parent f5b8b58826
commit 40b062ce0f
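This commit changes parse_type_annotation to take a plain slice of top-level definitions (&[Arc<RwLock<TopLevelDef>>]) instead of a &TopLevelContext, and TopLevelContext now stores its definition list as Arc<Vec<...>> with a read_top_level_def_list() accessor. A minimal, self-contained sketch of that ownership pattern, assuming std::sync::RwLock in place of parking_lot and a placeholder Def type; only read_top_level_def_list and the slice-shaped parameter mirror identifiers from this commit, the rest is illustrative:

    use std::sync::{Arc, RwLock};

    // Stand-in for nac3core's TopLevelDef (illustrative only).
    struct Def(&'static str);

    // After this commit the definition list itself is no longer behind a lock;
    // only the individual definitions are.
    struct Context {
        definitions: Arc<Vec<Arc<RwLock<Def>>>>,
    }

    impl Context {
        // Mirrors the new TopLevelContext::read_top_level_def_list: callers borrow
        // a plain slice and pass it around instead of the whole context.
        fn read_top_level_def_list(&self) -> &[Arc<RwLock<Def>>] {
            self.definitions.as_slice()
        }
    }

    // Sketch of the new parameter shape: a slice of definitions, not a context reference.
    fn parse_type_annotation(top_level_defs: &[Arc<RwLock<Def>>]) -> usize {
        top_level_defs.len()
    }

    fn main() {
        let ctx = Context {
            definitions: Arc::new(vec![Arc::new(RwLock::new(Def("class A")))]),
        };
        assert_eq!(parse_type_annotation(ctx.read_top_level_def_list()), 1);
    }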
@@ -56,7 +56,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
             // we cannot have other types, virtual type should be handled by function calls
             _ => unreachable!(),
         };
-        let def = &self.top_level.definitions.read()[obj_id.0];
+        let def = &self.top_level.definitions[obj_id.0];
         let index = if let TopLevelDef::Class { fields, .. } = &*def.read() {
             fields.iter().find_position(|x| x.0 == attr).unwrap().0
         } else {
@@ -104,8 +104,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
        ret: Type,
    ) -> Option<BasicValueEnum<'ctx>> {
        let key = self.get_subst_key(obj.map(|(a, _)| a), fun.0);
-        let defs = self.top_level.definitions.read();
-        let definition = defs.get(fun.1 .0).unwrap();
+        let definition = self.top_level.definitions.get(fun.1 .0).unwrap();
        let val = if let TopLevelDef::Function { instance_to_symbol, .. } = &*definition.read() {
            let symbol = instance_to_symbol.get(&key).unwrap_or_else(|| {
                // TODO: codegen for function that are not yet generated
@@ -206,8 +206,7 @@ fn get_llvm_type<'ctx>(
     match &*unifier.get_ty(ty) {
         TObj { obj_id, fields, .. } => {
             // a struct with fields in the order of declaration
-            let defs = top_level.definitions.read();
-            let definition = defs.get(obj_id.0).unwrap();
+            let definition = top_level.definitions.get(obj_id.0).unwrap();
             let ty = if let TopLevelDef::Class { fields: fields_list, .. } = &*definition.read()
             {
                 let fields = fields.borrow();
@@ -1,13 +1,14 @@
-use std::cell::RefCell;
+use std::{cell::RefCell, sync::Arc};
 use std::collections::HashMap;
 
-use crate::top_level::{DefinitionId, TopLevelContext, TopLevelDef};
+use crate::top_level::{DefinitionId, TopLevelDef};
 use crate::typecheck::{
     type_inferencer::PrimitiveStore,
     typedef::{Type, Unifier},
 };
 use crate::{location::Location, typecheck::typedef::TypeEnum};
 use itertools::{chain, izip};
+use parking_lot::RwLock;
 use rustpython_parser::ast::Expr;
 
 #[derive(Clone, PartialEq)]
@@ -39,7 +40,7 @@ pub trait SymbolResolver {
 // convert type annotation into type
 pub fn parse_type_annotation<T>(
     resolver: &dyn SymbolResolver,
-    top_level: &TopLevelContext,
+    top_level_defs: &[Arc<RwLock<TopLevelDef>>],
     unifier: &mut Unifier,
     primitives: &PrimitiveStore,
     expr: &Expr<T>,
@@ -55,8 +56,7 @@ pub fn parse_type_annotation<T>(
         x => {
             let obj_id = resolver.get_identifier_def(x);
             if let Some(obj_id) = obj_id {
-                let defs = top_level.definitions.read();
-                let def = defs[obj_id.0].read();
+                let def = top_level_defs[obj_id.0].read();
                 if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
                     if !type_vars.is_empty() {
                         return Err(format!(
@@ -96,26 +96,25 @@ pub fn parse_type_annotation<T>(
         if let Name { id, .. } = &value.node {
             if id == "virtual" {
                 let ty =
-                    parse_type_annotation(resolver, top_level, unifier, primitives, slice)?;
+                    parse_type_annotation(resolver, top_level_defs, unifier, primitives, slice)?;
                 Ok(unifier.add_ty(TypeEnum::TVirtual { ty }))
             } else {
                 let types = if let Tuple { elts, .. } = &slice.node {
                     elts.iter()
                         .map(|v| {
-                            parse_type_annotation(resolver, top_level, unifier, primitives, v)
+                            parse_type_annotation(resolver, top_level_defs, unifier, primitives, v)
                         })
                         .collect::<Result<Vec<_>, _>>()?
                 } else {
                     vec![parse_type_annotation(
-                        resolver, top_level, unifier, primitives, slice,
+                        resolver, top_level_defs, unifier, primitives, slice,
                     )?]
                 };
 
                 let obj_id = resolver
                     .get_identifier_def(id)
                     .ok_or_else(|| format!("Unknown type annotation {}", id))?;
-                let defs = top_level.definitions.read();
-                let def = defs[obj_id.0].read();
+                let def = top_level_defs[obj_id.0].read();
                 if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
                     if types.len() != type_vars.len() {
                         return Err(format!(
@@ -164,11 +163,11 @@ pub fn parse_type_annotation<T>(
 impl dyn SymbolResolver + Send + Sync {
     pub fn parse_type_annotation<T>(
         &self,
-        top_level: &TopLevelContext,
+        top_level_defs: &[Arc<RwLock<TopLevelDef>>],
         unifier: &mut Unifier,
         primitives: &PrimitiveStore,
         expr: &Expr<T>,
     ) -> Result<Type, String> {
-        parse_type_annotation(self, top_level, unifier, primitives, expr)
+        parse_type_annotation(self, top_level_defs, unifier, primitives, expr)
     }
 }
@@ -4,9 +4,9 @@ use std::{collections::HashMap, collections::HashSet, sync::Arc};
 
 use super::typecheck::type_inferencer::PrimitiveStore;
 use super::typecheck::typedef::{SharedUnifier, Type, TypeEnum, Unifier};
-use crate::typecheck::typedef::{FunSignature, FuncArg};
+use crate::typecheck::{typedef::{FunSignature, FuncArg}};
 use crate::{symbol_resolver::SymbolResolver, typecheck::typedef::Mapping};
-use itertools::Itertools;
+use itertools::{Itertools, izip};
 use parking_lot::{Mutex, RwLock};
 use rustpython_parser::ast::{self, Stmt};
 
@@ -55,50 +55,44 @@ pub enum TopLevelDef {
     },
 }
 
-impl TopLevelDef {
-    fn get_function_type(&self) -> Result<Type, String> {
-        if let Self::Function { signature, .. } = self {
-            Ok(*signature)
-        } else {
-            Err("only expect function def here".into())
-        }
-    }
-}
-
 pub struct TopLevelContext {
-    pub definitions: Arc<RwLock<Vec<Arc<RwLock<TopLevelDef>>>>>,
+    pub definitions: Arc<Vec<Arc<RwLock<TopLevelDef>>>>,
     pub unifiers: Arc<RwLock<Vec<(SharedUnifier, PrimitiveStore)>>>,
 }
 
+impl TopLevelContext {
+    pub fn read_top_level_def_list(&self) -> &[Arc<RwLock<TopLevelDef>>] {
+        self.definitions.as_slice()
+    }
+}
+
 pub struct TopLevelComposer {
     // list of top level definitions, same as top level context
-    pub definition_ast_list: Arc<RwLock<Vec<(Arc<RwLock<TopLevelDef>>, Option<ast::Stmt<()>>)>>>,
+    pub definition_ast_list: Vec<(Arc<RwLock<TopLevelDef>>, Option<ast::Stmt<()>>)>,
     // start as a primitive unifier, will add more top_level defs inside
     pub unifier: Unifier,
     // primitive store
     pub primitives: PrimitiveStore,
     // mangled class method name to def_id
-    pub class_method_to_def_id: HashMap<String, DefinitionId>,
+    // pub class_method_to_def_id: HashMap<String, DefinitionId>,
     // record the def id of the classes whoses fields and methods are to be analyzed
-    pub to_be_analyzed_class: Vec<DefinitionId>,
+    // pub to_be_analyzed_class: Vec<DefinitionId>,
 }
 
 impl TopLevelComposer {
-    pub fn to_top_level_context(&self) -> TopLevelContext {
-        let def_list =
-            self.definition_ast_list.read().iter().map(|(x, _)| x.clone()).collect::<Vec<_>>();
+    pub fn to_top_level_context(self) -> TopLevelContext {
         TopLevelContext {
-            definitions: RwLock::new(def_list).into(),
+            definitions: self
+                .definition_ast_list
+                .into_iter()
+                .map(|(x, ..)| x)
+                .collect::<Vec<_>>()
+                .into(),
             // FIXME: all the big unifier or?
             unifiers: Default::default(),
         }
    }
 
-    fn name_mangling(mut class_name: String, method_name: &str) -> String {
-        class_name.push_str(method_name);
-        class_name
-    }
-
     pub fn make_primitives() -> (PrimitiveStore, Unifier) {
         let mut unifier = Unifier::new();
         let int32 = unifier.add_ty(TypeEnum::TObj {
@@ -147,14 +141,11 @@ impl TopLevelComposer {
         let ast_list: Vec<Option<ast::Stmt<()>>> = vec![None, None, None, None, None];
 
         let composer = TopLevelComposer {
-            definition_ast_list: RwLock::new(
-                top_level_def_list.into_iter().zip(ast_list).collect_vec(),
-            )
-            .into(),
+            definition_ast_list: izip!(top_level_def_list, ast_list).collect_vec(),
             primitives: primitives.0,
             unifier: primitives.1,
-            class_method_to_def_id: Default::default(),
-            to_be_analyzed_class: Default::default(),
+            // class_method_to_def_id: Default::default(),
+            // to_be_analyzed_class: Default::default(),
         };
         (
             vec![
@@ -201,19 +192,35 @@ impl TopLevelComposer {
         }
     }
 
+    // fn get_class_method_def_id(class_name: &str, method_name: &str, resolver: &dyn SymbolResolver) -> Result<DefinitionId, String> {
+    //     let class_def = resolver.get_identifier_def(class_name).ok_or_else(|| "no such class".to_string())?;
+
+    // }
+
+    fn name_mangling(class_name: String, method_name: &str) -> String {
+        class_name.push_str(method_name);
+        class_name
+    }
+
+    fn extract_def_list(&self) -> Vec<Arc<RwLock<TopLevelDef>>> {
+        self
+            .definition_ast_list
+            .iter()
+            .map(|(def, ..)| def.clone())
+            .collect_vec()
+    }
+
     /// step 0, register, just remeber the names of top level classes/function
     pub fn register_top_level(
         &mut self,
         ast: ast::Stmt<()>,
         resolver: Option<Arc<Mutex<dyn SymbolResolver + Send + Sync>>>,
     ) -> Result<(String, DefinitionId), String> {
-        let mut def_list = self.definition_ast_list.write();
         match &ast.node {
             ast::StmtKind::ClassDef { name, body, .. } => {
                 let class_name = name.to_string();
-                let class_def_id = def_list.len();
+                let class_def_id = self.definition_ast_list.len();
 
-                // add the class to the definition lists
                 // since later when registering class method, ast will still be used,
                 // here push None temporarly, later will move the ast inside
                 let mut class_def_ast = (
@@ -232,49 +239,60 @@ impl TopLevelComposer {
                     String,
                     Arc<RwLock<TopLevelDef>>,
                     DefinitionId,
+                    Type
                 )> = Vec::new();
                 let mut class_method_index_offset = 0;
                 for b in body {
                     if let ast::StmtKind::FunctionDef { name: method_name, .. } = &b.node {
-                        let method_name = Self::name_mangling(class_name.clone(), method_name);
-                        let method_def_id = def_list.len() + {
+                        let method_def_id = self.definition_ast_list.len() + {
                            class_method_index_offset += 1;
                            class_method_index_offset
                        };
 
                        // dummy method define here
-                        // the ast of class method is in the class, push None in to the list here
+                        let dummy_method_type = self.unifier.get_fresh_var();
                        class_method_name_def_ids.push((
                            method_name.clone(),
                            RwLock::new(Self::make_top_level_function_def(
-                                method_name.clone(),
-                                self.primitives.none,
+                                Self::name_mangling(class_name, method_name),
+                                // later unify with parsed type
+                                dummy_method_type.0,
                                resolver.clone(),
                            ))
                            .into(),
                            DefinitionId(method_def_id),
+                            dummy_method_type.0
                        ));
+
+                    } else {
+                        // do nothing
+                        continue
                    }
                }
 
                // move the ast to the entry of the class in the ast_list
                class_def_ast.1 = Some(ast);
+                // get the methods into the class_def
+                for (name, _, id, ty) in class_method_name_def_ids {
+                    if let TopLevelDef::Class { methods, .. } = class_def_ast.0.get_mut() {
+                        methods.push((name, ty, id))
+                    } else { unreachable!() }
+                }
                // now class_def_ast and class_method_def_ast_ids are ok, put them into actual def list in correct order
-                def_list.push(class_def_ast);
-                for (name, def, id) in class_method_name_def_ids {
-                    def_list.push((def, None));
-                    self.class_method_to_def_id.insert(name, id);
+                self.definition_ast_list.push(class_def_ast);
+                for (_, def, ..) in class_method_name_def_ids {
+                    self.definition_ast_list.push((def, None));
                }
 
                // put the constructor into the def_list
-                def_list.push((
+                self.definition_ast_list.push((
                    RwLock::new(TopLevelDef::Initializer { class_id: DefinitionId(class_def_id) })
                        .into(),
                    None,
                ));
 
                // class, put its def_id into the to be analyzed set
-                self.to_be_analyzed_class.push(DefinitionId(class_def_id));
+                // self.to_be_analyzed_class.push(DefinitionId(class_def_id));
 
                Ok((class_name, DefinitionId(class_def_id)))
            }
@@ -283,10 +301,11 @@ impl TopLevelComposer {
                 let fun_name = name.to_string();
 
                 // add to the definition list
-                def_list.push((
+                self.definition_ast_list.push((
                     RwLock::new(Self::make_top_level_function_def(
                         name.into(),
-                        self.primitives.none,
+                        // unify with correct type later
+                        self.unifier.get_fresh_var().0,
                         resolver,
                     ))
                     .into(),
@@ -294,7 +313,7 @@ impl TopLevelComposer {
                 ));
 
                 // return
-                Ok((fun_name, DefinitionId(def_list.len() - 1)))
+                Ok((fun_name, DefinitionId(self.definition_ast_list.len() - 1)))
             }
 
             _ => Err("only registrations of top level classes/functions are supprted".into()),
@@ -303,12 +322,7 @@ impl TopLevelComposer {
 
     /// step 1, analyze the type vars associated with top level class
     fn analyze_top_level_class_type_var(&mut self) -> Result<(), String> {
-        let mut def_list = self.definition_ast_list.write();
-        let converted_top_level = &self.to_top_level_context();
-        let primitives = &self.primitives;
-        let unifier = &mut self.unifier;
-
-        for (class_def, class_ast) in def_list.iter_mut() {
+        for (class_def, class_ast) in self.definition_ast_list {
             // only deal with class def here
             let mut class_def = class_def.write();
             let (class_bases_ast, class_def_type_vars, class_resolver) = {
@@ -326,7 +340,7 @@ impl TopLevelComposer {
                 }
             };
             let class_resolver = class_resolver.as_ref().unwrap().lock();
-
+            let class_resolver = class_resolver.deref();
             let mut is_generic = false;
             for b in class_bases_ast {
                 match &b.node {
@@ -335,65 +349,56 @@ impl TopLevelComposer {
                     // things like `class A(Generic[T, V, ImportedModule.T])` is not supported
                     // i.e. only simple names are allowed in the subscript
                     // should update the TopLevelDef::Class.typevars and the TypeEnum::TObj.params
-                    ast::ExprKind::Subscript { value, slice, .. } if matches!(&value.node, ast::ExprKind::Name { id, .. } if id == "Generic") =>
-                    {
+                    ast::ExprKind::Subscript { value, slice, .. }
+                        if {
+                            matches!(&value.node, ast::ExprKind::Name { id, .. } if id == "Generic")
+                        } => {
                         if !is_generic {
                             is_generic = true;
                         } else {
                             return Err("Only single Generic[...] can be in bases".into());
                         }
 
+                        let type_var_list: Vec<&ast::Expr<()>> = vec![];
                         // if `class A(Generic[T, V, G])`
                         if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
-                            // parse the type vars
-                            let type_vars = elts
-                                .iter()
-                                .map(|e| {
-                                    class_resolver.parse_type_annotation(
-                                        converted_top_level,
-                                        unifier.borrow_mut(),
-                                        primitives,
-                                        e,
-                                    )
-                                })
-                                .collect::<Result<Vec<_>, _>>()?;
-
-                            // check if all are unique type vars
-                            let mut occured_type_var_id: HashSet<u32> = HashSet::new();
-                            let all_unique_type_var = type_vars.iter().all(|x| {
-                                let ty = unifier.get_ty(*x);
-                                if let TypeEnum::TVar { id, .. } = ty.as_ref() {
-                                    occured_type_var_id.insert(*id)
-                                } else {
-                                    false
-                                }
-                            });
-
-                            if !all_unique_type_var {
-                                return Err("expect unique type variables".into());
-                            }
-
-                            // add to TopLevelDef
-                            class_def_type_vars.extend(type_vars);
-
+                            type_var_list.extend(elts.iter());
                         // `class A(Generic[T])`
                         } else {
-                            let ty = class_resolver.parse_type_annotation(
-                                converted_top_level,
-                                unifier.borrow_mut(),
-                                primitives,
-                                &slice,
-                            )?;
-                            // check if it is type var
-                            let is_type_var =
-                                matches!(unifier.get_ty(ty).as_ref(), &TypeEnum::TVar { .. });
-                            if !is_type_var {
-                                return Err("expect type variable here".into());
-                            }
-
-                            // add to TopLevelDef
-                            class_def_type_vars.push(ty);
+                            type_var_list.push(slice.deref());
                         }
+
+                        // parse the type vars
+                        let type_vars = type_var_list
+                            .into_iter()
+                            .map(|e| {
+                                let temp_def_list = self.extract_def_list();
+                                class_resolver.parse_type_annotation(
+                                    &temp_def_list,
+                                    self.unifier.borrow_mut(),
+                                    &self.primitives,
+                                    e
+                                )
+                            })
+                            .collect::<Result<Vec<_>, _>>()?;
+
+                        // check if all are unique type vars
+                        let mut occured_type_var_id: HashSet<u32> = HashSet::new();
+                        let all_unique_type_var = type_vars.iter().all(|x| {
+                            let ty = self.unifier.get_ty(*x);
+                            if let TypeEnum::TVar { id, .. } = ty.as_ref() {
+                                occured_type_var_id.insert(*id)
+                            } else {
+                                false
+                            }
+                        });
+
+                        if !all_unique_type_var {
+                            return Err("expect unique type variables".into());
+                        }
+
+                        // add to TopLevelDef
+                        class_def_type_vars.extend(type_vars);
                     }
 
                     // if others, do nothing in this function
@@ -410,12 +415,7 @@ impl TopLevelComposer {
     /// if the type var associated with class `B` has not been handled properly,
     /// the parse of type annotation of `B[int, bool]` will fail
     fn analyze_top_level_class_bases(&mut self) -> Result<(), String> {
-        let mut def_list = self.definition_ast_list.write();
-        let converted_top_level = &self.to_top_level_context();
-        let primitives = &self.primitives;
-        let unifier = &mut self.unifier;
-
-        for (class_def, class_ast) in def_list.iter_mut() {
+        for (class_def, class_ast) in self.definition_ast_list.iter_mut() {
             let mut class_def = class_def.write();
             let (class_bases, class_ancestors, class_resolver) = {
                 if let TopLevelDef::Class { ancestors, resolver, .. } = class_def.deref_mut() {
@@ -432,52 +432,51 @@ impl TopLevelComposer {
                 }
             };
             let class_resolver = class_resolver.as_ref().unwrap().lock();
+            let class_resolver = class_resolver.deref();
 
+            let mut has_base = false;
             for b in class_bases {
                 // type vars have already been handled, so skip on `Generic[...]`
-                if let ast::ExprKind::Subscript { value, .. } = &b.node {
-                    if let ast::ExprKind::Name { id, .. } = &value.node {
-                        if id == "Generic" {
-                            continue;
-                        }
-                    }
+                if matches!(
+                    &b.node,
+                    ast::ExprKind::Subscript { value, .. }
+                        if matches!(
+                            &value.node,
+                            ast::ExprKind::Name { id, .. } if id == "Generic"
+                        )
+                ) { continue }
+                has_base = true;
+                if has_base {
+                    return Err("a class def can only have at most one base class \
+                        declaration and one generic declaration".into())
                 }
-                // get the def id of the base class
+                let temp_def_list = self.extract_def_list();
                 let base_ty = class_resolver.parse_type_annotation(
-                    converted_top_level,
-                    unifier.borrow_mut(),
-                    primitives,
-                    b,
+                    &temp_def_list,
+                    self.unifier.borrow_mut(),
+                    &self.primitives,
+                    b
                 )?;
 
                 let base_id =
-                    if let TypeEnum::TObj { obj_id, .. } = unifier.get_ty(base_ty).as_ref() {
+                    if let TypeEnum::TObj { obj_id, .. } = self.unifier.get_ty(base_ty).as_ref() {
                         *obj_id
                     } else {
                         return Err("expect concrete class/type to be base class".into());
                     };
 
-                // write to the class ancestors, make sure the uniqueness
-                if !class_ancestors.contains(&base_id) {
-                    class_ancestors.push(base_id);
-                } else {
-                    return Err("cannot specify the same base class twice".into());
-                }
+                // TODO: when base class is generic, record the generic type parameter
+                // TODO: check to prevent cyclic base class
+                class_ancestors.push(base_id);
             }
         }
         Ok(())
     }
 
     /// step 3, class fields and methods
-    // FIXME: analyze base classes here
-    // FIXME: deal with self type
-    // NOTE: prevent cycles only roughly done
     fn analyze_top_level_class_fields_methods(&mut self) -> Result<(), String> {
-        let mut def_ast_list = self.definition_ast_list.write();
-        let converted_top_level = &self.to_top_level_context();
-        let primitives = &self.primitives;
-        let to_be_analyzed_class = &mut self.to_be_analyzed_class;
-        let unifier = &mut self.unifier;
 
-        // NOTE: roughly prevent infinite loop
         let mut max_iter = to_be_analyzed_class.len() * 4;
         'class: loop {
             if to_be_analyzed_class.is_empty() && {
@@ -506,6 +505,8 @@ impl TopLevelComposer {
                     unreachable!("should be class def ast")
                 }
             };
+            let class_resolver = class_resolver.as_ref().lock();
+            let class_resolver = class_resolver.deref();
 
             let all_base_class_analyzed = {
                 let not_yet_analyzed =
@@ -524,16 +525,7 @@ impl TopLevelComposer {
                 let class_bases_ty = class_bases_ast
                     .iter()
                     .filter_map(|x| {
-                        class_resolver
-                            .as_ref()
-                            .lock()
-                            .parse_type_annotation(
-                                converted_top_level,
-                                unifier.borrow_mut(),
-                                primitives,
-                                x,
-                            )
-                            .ok()
+                        self.parse_type_annotation(class_resolver, x).ok()
                     })
                     .collect_vec();
 
@@ -564,12 +556,7 @@ impl TopLevelComposer {
                             })?
                             .as_ref();
 
-                        let ty = class_resolver.as_ref().lock().parse_type_annotation(
-                            converted_top_level,
-                            unifier.borrow_mut(),
-                            primitives,
-                            annotation,
-                        )?;
+                        let ty = self.parse_type_annotation(class_resolver, annotation)?;
                         if !Self::check_ty_analyzed(ty, unifier, to_be_analyzed_class) {
                             to_be_analyzed_class.push(DefinitionId(class_ind));
                             continue 'class;
@@ -600,12 +587,7 @@ impl TopLevelComposer {
                         let ty = method_returns_ast
                             .as_ref()
                             .map(|x| {
-                                class_resolver.as_ref().lock().parse_type_annotation(
-                                    converted_top_level,
-                                    unifier.borrow_mut(),
-                                    primitives,
-                                    x.as_ref(),
-                                )
+                                self.parse_type_annotation(class_resolver, x)
                             })
                             .ok_or_else(|| "return type annotation error".to_string())??;
                         if !Self::check_ty_analyzed(ty, unifier, to_be_analyzed_class) {
@@ -640,12 +622,7 @@ impl TopLevelComposer {
                         } =>
                         {
                             let field_ty =
-                                class_resolver.as_ref().lock().parse_type_annotation(
-                                    converted_top_level,
-                                    unifier.borrow_mut(),
-                                    primitives,
-                                    annotation.as_ref(),
-                                )?;
+                                self.parse_type_annotation(class_resolver, annotation)?;
                             if !Self::check_ty_analyzed(
                                 field_ty,
                                 unifier,
@@ -83,7 +83,7 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
             None
         };
         let annotation_type = self.function_data.resolver.parse_type_annotation(
-            self.top_level,
+            self.top_level.read_top_level_def_list(),
             self.unifier,
             &self.primitives,
             annotation.as_ref(),
@@ -345,7 +345,7 @@ impl<'a> Inferencer<'a> {
         let arg0 = self.fold_expr(args.remove(0))?;
         let ty = if let Some(arg) = args.pop() {
             self.function_data.resolver.parse_type_annotation(
-                self.top_level,
+                self.top_level.read_top_level_def_list(),
                 self.unifier,
                 self.primitives,
                 &arg,
@@ -269,7 +269,7 @@ impl TestEnvironment {
             .collect();
 
         let top_level = TopLevelContext {
-            definitions: Arc::new(RwLock::new(top_level_defs)),
+            definitions: Arc::new(top_level_defs),
             unifiers: Default::default(),
         };
 