forked from M-Labs/nac3

meta: Remove redundant path prefixes

David Mak 2023-10-26 13:52:40 +08:00
parent 68556da5fd
commit 5182453bd9
19 changed files with 81 additions and 81 deletions
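The whole change follows one pattern: where an item is already brought into scope by a `use` declaration (or is reachable through the std prelude), the fully qualified path at the call site is shortened to the bare name. A minimal, self-contained sketch of the idea, using std names purely for illustration (none of this code is from nac3):

use std::collections::HashSet;

// Before the cleanup: the full path is spelled out at the call site even
// though a `use` import could make it shorter.
fn before(values: &[i32]) -> std::collections::HashSet<i32> {
    values.iter().copied().collect::<std::collections::HashSet<i32>>()
}

// After the cleanup: the single `use` above covers the call site, and the
// behaviour is identical.
fn after(values: &[i32]) -> HashSet<i32> {
    values.iter().copied().collect()
}

fn main() {
    assert_eq!(before(&[1, 2, 2]), after(&[1, 2, 2]));
    println!("{:?}", after(&[1, 2, 2]));
}

The diff below applies this same cleanup to `inkwell`, `nac3parser::ast`, and `std` paths across the codebase; no behaviour changes.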

View File

@@ -413,7 +413,7 @@ fn rpc_codegen_callback_fn<'ctx, 'a>(
 args: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
 generator: &mut dyn CodeGenerator,
 ) -> Result<Option<BasicValueEnum<'ctx>>, String> {
-let ptr_type = ctx.ctx.i8_type().ptr_type(inkwell::AddressSpace::default());
+let ptr_type = ctx.ctx.i8_type().ptr_type(AddressSpace::default());
 let size_type = generator.get_size_type(ctx.ctx);
 let int8 = ctx.ctx.i8_type();
 let int32 = ctx.ctx.i32_type();

View File

@@ -17,8 +17,8 @@ use nac3core::codegen::{CodeGenLLVMOptions, CodeGenTargetMachineOptions, gen_fun
 use nac3core::toplevel::builtins::get_exn_constructor;
 use nac3core::typecheck::typedef::{TypeEnum, Unifier};
 use nac3parser::{
-ast::{self, ExprKind, Stmt, StmtKind, StrRef},
-parser::{self, parse_program},
+ast::{ExprKind, Stmt, StmtKind, StrRef},
+parser::parse_program,
 };
 use pyo3::prelude::*;
 use pyo3::{exceptions, types::PyBytes, types::PyDict, types::PySet};
@@ -120,16 +120,16 @@ impl Nac3 {
 let source = fs::read_to_string(&source_file).map_err(|e| {
 exceptions::PyIOError::new_err(format!("failed to read input file: {}", e))
 })?;
-let parser_result = parser::parse_program(&source, source_file.into())
+let parser_result = parse_program(&source, source_file.into())
 .map_err(|e| exceptions::PySyntaxError::new_err(format!("parse error: {}", e)))?;
 for mut stmt in parser_result.into_iter() {
 let include = match stmt.node {
-ast::StmtKind::ClassDef {
+StmtKind::ClassDef {
 ref decorator_list, ref mut body, ref mut bases, ..
 } => {
 let nac3_class = decorator_list.iter().any(|decorator| {
-if let ast::ExprKind::Name { id, .. } = decorator.node {
+if let ExprKind::Name { id, .. } = decorator.node {
 id.to_string() == "nac3"
 } else {
 false
@@ -143,7 +143,7 @@ impl Nac3 {
 Python::with_gil(|py| -> PyResult<bool> {
 let id_fn = PyModule::import(py, "builtins")?.getattr("id")?;
 match &base.node {
-ast::ExprKind::Name { id, .. } => {
+ExprKind::Name { id, .. } => {
 if *id == "Exception".into() {
 Ok(true)
 } else {
@@ -158,9 +158,9 @@ impl Nac3 {
 .unwrap()
 });
 body.retain(|stmt| {
-if let ast::StmtKind::FunctionDef { ref decorator_list, .. } = stmt.node {
+if let StmtKind::FunctionDef { ref decorator_list, .. } = stmt.node {
 decorator_list.iter().any(|decorator| {
-if let ast::ExprKind::Name { id, .. } = decorator.node {
+if let ExprKind::Name { id, .. } = decorator.node {
 id.to_string() == "kernel"
 || id.to_string() == "portable"
 || id.to_string() == "rpc"
@@ -174,9 +174,9 @@ impl Nac3 {
 });
 true
 }
-ast::StmtKind::FunctionDef { ref decorator_list, .. } => {
+StmtKind::FunctionDef { ref decorator_list, .. } => {
 decorator_list.iter().any(|decorator| {
-if let ast::ExprKind::Name { id, .. } = decorator.node {
+if let ExprKind::Name { id, .. } = decorator.node {
 let id = id.to_string();
 id == "extern" || id == "portable" || id == "kernel" || id == "rpc"
 } else {
@@ -639,7 +639,7 @@ impl Nac3 {
 let mut function_iter = main.get_first_function();
 while let Some(func) = function_iter {
 if func.count_basic_blocks() > 0 && func.get_name().to_str().unwrap() != "__modinit__" {
-func.set_linkage(inkwell::module::Linkage::Private);
+func.set_linkage(Linkage::Private);
 }
 function_iter = func.get_next_function();
 }

View File

@@ -42,7 +42,7 @@ fn main() {
 let output = std::str::from_utf8(&output.stdout).unwrap().replace("\r\n", "\n");
 let mut filtered_output = String::with_capacity(output.len());
-let regex_filter = regex::Regex::new(r"(?ms:^define.*?\}$)|(?m:^declare.*?$)").unwrap();
+let regex_filter = Regex::new(r"(?ms:^define.*?\}$)|(?m:^declare.*?$)").unwrap();
 for f in regex_filter.captures_iter(&output) {
 assert!(f.len() == 1);
 filtered_output.push_str(&f[0]);

View File

@@ -499,7 +499,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
 generator: &mut dyn CodeGenerator,
 s: S,
 ) -> BasicValueEnum<'ctx> {
-self.gen_const(generator, &nac3parser::ast::Constant::Str(s.into()), self.primitives.str).unwrap()
+self.gen_const(generator, &Constant::Str(s.into()), self.primitives.str).unwrap()
 }
 pub fn raise_exn(
@@ -1860,7 +1860,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
 }
 } else if let TypeEnum::TTuple { .. } = &*ctx.unifier.get_ty(value.custom.unwrap()) {
 let index: u32 =
-if let ExprKind::Constant { value: ast::Constant::Int(v), .. } = &slice.node {
+if let ExprKind::Constant { value: Constant::Int(v), .. } = &slice.node {
 (*v).try_into().unwrap()
 } else {
 unreachable!("tuple subscript must be const int after type check");

View File

@@ -348,7 +348,7 @@ pub fn list_slice_assignment<'ctx, 'a>(
 let src_end = ctx.builder
 .build_select(
 ctx.builder.build_int_compare(
-inkwell::IntPredicate::SLT,
+IntPredicate::SLT,
 src_idx.2,
 zero,
 "is_neg",
@@ -361,7 +361,7 @@ pub fn list_slice_assignment<'ctx, 'a>(
 let dest_end = ctx.builder
 .build_select(
 ctx.builder.build_int_compare(
-inkwell::IntPredicate::SLT,
+IntPredicate::SLT,
 dest_idx.2,
 zero,
 "is_neg",

View File

@@ -383,8 +383,8 @@ fn get_llvm_type<'ctx>(
 match (unifier.get_ty(ty).as_ref(), unifier.get_ty(primitives.option).as_ref())
 {
 (
-TypeEnum::TObj { obj_id, params, .. },
-TypeEnum::TObj { obj_id: opt_id, .. },
+TObj { obj_id, params, .. },
+TObj { obj_id: opt_id, .. },
 ) if *obj_id == *opt_id => {
 return get_llvm_type(
 ctx,

View File

@@ -298,7 +298,7 @@ pub trait SymbolResolver {
 ctx: &mut CodeGenContext<'ctx, 'a>,
 ) -> Option<ValueEnum<'ctx>>;
-fn get_default_param_value(&self, expr: &nac3parser::ast::Expr) -> Option<SymbolValue>;
+fn get_default_param_value(&self, expr: &Expr) -> Option<SymbolValue>;
 fn get_string_id(&self, s: &str) -> i32;
 fn get_exception_id(&self, tyid: usize) -> usize;

View File

@@ -1904,7 +1904,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
 })),
 ];
-let ast_list: Vec<Option<ast::Stmt<()>>> =
+let ast_list: Vec<Option<Stmt<()>>> =
 (0..top_level_def_list.len()).map(|_| None).collect();
 izip!(top_level_def_list, ast_list).collect_vec()

View File

@@ -20,7 +20,7 @@ impl Default for ComposerConfig {
 }
 }
-type DefAst = (Arc<RwLock<TopLevelDef>>, Option<ast::Stmt<()>>);
+type DefAst = (Arc<RwLock<TopLevelDef>>, Option<Stmt<()>>);
 pub struct TopLevelComposer {
 // list of top level definitions, same as top level context
 pub definition_ast_list: Vec<DefAst>,
@@ -175,7 +175,7 @@ impl TopLevelComposer {
 /// and check duplicate class/method/function definition
 pub fn register_top_level(
 &mut self,
-ast: ast::Stmt<()>,
+ast: Stmt<()>,
 resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
 mod_path: String,
 allow_no_constructor: bool,
@@ -230,7 +230,7 @@ impl TopLevelComposer {
 Arc<RwLock<TopLevelDef>>,
 DefinitionId,
 Type,
-ast::Stmt<()>,
+Stmt<()>,
 );
 let mut class_method_name_def_ids: Vec<MethodInfo> = Vec::new();
 // we do not push anything to the def list, so we keep track of the index

View File

@@ -334,7 +334,7 @@ impl TopLevelComposer {
 )
 }
-pub fn get_all_assigned_field(stmts: &[ast::Stmt<()>]) -> Result<HashSet<StrRef>, String> {
+pub fn get_all_assigned_field(stmts: &[Stmt<()>]) -> Result<HashSet<StrRef>, String> {
 let mut result = HashSet::new();
 for s in stmts {
 match &s.node {

View File

@@ -36,7 +36,7 @@ struct Resolver(Arc<ResolverInternal>);
 impl SymbolResolver for Resolver {
 fn get_default_param_value(
 &self,
-_: &nac3parser::ast::Expr,
+_: &ast::Expr,
 ) -> Option<crate::symbol_resolver::SymbolValue> {
 unimplemented!()
 }

View File

@@ -20,7 +20,7 @@ impl<'a> Inferencer<'a> {
 defined_identifiers: &mut HashSet<StrRef>,
 ) -> Result<(), String> {
 match &pattern.node {
-ast::ExprKind::Name { id, .. } if id == &"none".into() =>
+ExprKind::Name { id, .. } if id == &"none".into() =>
 Err(format!("cannot assign to a `none` (at {})", pattern.location)),
 ExprKind::Name { id, .. } => {
 if !defined_identifiers.contains(id) {

View File

@@ -2,7 +2,7 @@ use crate::typecheck::{
 type_inferencer::*,
 typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier},
 };
-use nac3parser::ast::{self, StrRef};
+use nac3parser::ast::StrRef;
 use nac3parser::ast::{Cmpop, Operator, Unaryop};
 use std::collections::HashMap;
 use std::rc::Rc;
@@ -87,7 +87,7 @@ pub fn impl_binop(
 ty: Type,
 other_ty: &[Type],
 ret_ty: Type,
-ops: &[ast::Operator],
+ops: &[Operator],
 ) {
 with_fields(unifier, ty, |unifier, fields| {
 let (other_ty, other_var_id) = if other_ty.len() == 1 {
@@ -137,7 +137,7 @@ pub fn impl_binop(
 });
 }
-pub fn impl_unaryop(unifier: &mut Unifier, ty: Type, ret_ty: Type, ops: &[ast::Unaryop]) {
+pub fn impl_unaryop(unifier: &mut Unifier, ty: Type, ret_ty: Type, ops: &[Unaryop]) {
 with_fields(unifier, ty, |unifier, fields| {
 for op in ops {
 fields.insert(
@@ -160,7 +160,7 @@ pub fn impl_cmpop(
 store: &PrimitiveStore,
 ty: Type,
 other_ty: Type,
-ops: &[ast::Cmpop],
+ops: &[Cmpop],
 ) {
 with_fields(unifier, ty, |unifier, fields| {
 for op in ops {
@@ -197,7 +197,7 @@ pub fn impl_basic_arithmetic(
 ty,
 other_ty,
 ret_ty,
-&[ast::Operator::Add, ast::Operator::Sub, ast::Operator::Mult],
+&[Operator::Add, Operator::Sub, Operator::Mult],
 )
 }
@@ -209,7 +209,7 @@ pub fn impl_pow(
 other_ty: &[Type],
 ret_ty: Type,
 ) {
-impl_binop(unifier, store, ty, other_ty, ret_ty, &[ast::Operator::Pow])
+impl_binop(unifier, store, ty, other_ty, ret_ty, &[Operator::Pow])
 }
 /// BitOr, BitXor, BitAnd
@@ -220,18 +220,18 @@ pub fn impl_bitwise_arithmetic(unifier: &mut Unifier, store: &PrimitiveStore, ty
 ty,
 &[ty],
 ty,
-&[ast::Operator::BitAnd, ast::Operator::BitOr, ast::Operator::BitXor],
+&[Operator::BitAnd, Operator::BitOr, Operator::BitXor],
 )
 }
 /// LShift, RShift
 pub fn impl_bitwise_shift(unifier: &mut Unifier, store: &PrimitiveStore, ty: Type) {
-impl_binop(unifier, store, ty, &[store.int32, store.uint32], ty, &[ast::Operator::LShift, ast::Operator::RShift]);
+impl_binop(unifier, store, ty, &[store.int32, store.uint32], ty, &[Operator::LShift, Operator::RShift]);
 }
 /// Div
 pub fn impl_div(unifier: &mut Unifier, store: &PrimitiveStore, ty: Type, other_ty: &[Type]) {
-impl_binop(unifier, store, ty, other_ty, store.float, &[ast::Operator::Div])
+impl_binop(unifier, store, ty, other_ty, store.float, &[Operator::Div])
 }
 /// FloorDiv
@@ -242,7 +242,7 @@ pub fn impl_floordiv(
 other_ty: &[Type],
 ret_ty: Type,
 ) {
-impl_binop(unifier, store, ty, other_ty, ret_ty, &[ast::Operator::FloorDiv])
+impl_binop(unifier, store, ty, other_ty, ret_ty, &[Operator::FloorDiv])
 }
 /// Mod
@@ -253,22 +253,22 @@ pub fn impl_mod(
 other_ty: &[Type],
 ret_ty: Type,
 ) {
-impl_binop(unifier, store, ty, other_ty, ret_ty, &[ast::Operator::Mod])
+impl_binop(unifier, store, ty, other_ty, ret_ty, &[Operator::Mod])
 }
 /// UAdd, USub
 pub fn impl_sign(unifier: &mut Unifier, _store: &PrimitiveStore, ty: Type) {
-impl_unaryop(unifier, ty, ty, &[ast::Unaryop::UAdd, ast::Unaryop::USub])
+impl_unaryop(unifier, ty, ty, &[Unaryop::UAdd, Unaryop::USub])
 }
 /// Invert
 pub fn impl_invert(unifier: &mut Unifier, _store: &PrimitiveStore, ty: Type) {
-impl_unaryop(unifier, ty, ty, &[ast::Unaryop::Invert])
+impl_unaryop(unifier, ty, ty, &[Unaryop::Invert])
 }
 /// Not
 pub fn impl_not(unifier: &mut Unifier, store: &PrimitiveStore, ty: Type) {
-impl_unaryop(unifier, ty, store.bool, &[ast::Unaryop::Not])
+impl_unaryop(unifier, ty, store.bool, &[Unaryop::Not])
 }
 /// Lt, LtE, Gt, GtE
@@ -278,13 +278,13 @@ pub fn impl_comparison(unifier: &mut Unifier, store: &PrimitiveStore, ty: Type,
 store,
 ty,
 other_ty,
-&[ast::Cmpop::Lt, ast::Cmpop::Gt, ast::Cmpop::LtE, ast::Cmpop::GtE],
+&[Cmpop::Lt, Cmpop::Gt, Cmpop::LtE, Cmpop::GtE],
 )
 }
 /// Eq, NotEq
 pub fn impl_eq(unifier: &mut Unifier, store: &PrimitiveStore, ty: Type) {
-impl_cmpop(unifier, store, ty, ty, &[ast::Cmpop::Eq, ast::Cmpop::NotEq])
+impl_cmpop(unifier, store, ty, ty, &[Cmpop::Eq, Cmpop::NotEq])
 }
 pub fn set_primitives_magic_methods(store: &PrimitiveStore, unifier: &mut Unifier) {

View File

@@ -62,7 +62,7 @@ pub struct Inferencer<'a> {
 }
 struct NaiveFolder();
-impl fold::Fold<()> for NaiveFolder {
+impl Fold<()> for NaiveFolder {
 type TargetU = Option<Type>;
 type Error = String;
 fn map_user(&mut self, _: ()) -> Result<Self::TargetU, Self::Error> {
@@ -74,7 +74,7 @@ fn report_error<T>(msg: &str, location: Location) -> Result<T, String> {
 Err(format!("{} at {}", msg, location))
 }
-impl<'a> fold::Fold<()> for Inferencer<'a> {
+impl<'a> Fold<()> for Inferencer<'a> {
 type TargetU = Option<Type>;
 type Error = String;
@@ -231,14 +231,14 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
 *ctx = ExprContext::Store;
 }
 }
-if targets.iter().all(|t| matches!(t.node, ast::ExprKind::Name { .. })) {
+if targets.iter().all(|t| matches!(t.node, ExprKind::Name { .. })) {
 if let ast::StmtKind::Assign { targets, value, .. } = node.node {
 let value = self.fold_expr(*value)?;
 let value_ty = value.custom.unwrap();
 let targets: Result<Vec<_>, _> = targets
 .into_iter()
 .map(|target| {
-if let ast::ExprKind::Name { id, ctx } = target.node {
+if let ExprKind::Name { id, ctx } = target.node {
 self.defined_identifiers.insert(id);
 let target_ty = if let Some(ty) = self.variable_mapping.get(&id)
 {
@@ -261,7 +261,7 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
 let location = target.location;
 self.unifier.unify(value_ty, target_ty).map(|_| Located {
 location,
-node: ast::ExprKind::Name { id, ctx },
+node: ExprKind::Name { id, ctx },
 custom: Some(target_ty),
 })
 } else {
@@ -440,22 +440,22 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
 fn fold_expr(&mut self, node: ast::Expr<()>) -> Result<ast::Expr<Self::TargetU>, Self::Error> {
 let expr = match node.node {
-ast::ExprKind::Call { func, args, keywords } => {
+ExprKind::Call { func, args, keywords } => {
 return self.fold_call(node.location, *func, args, keywords);
 }
-ast::ExprKind::Lambda { args, body } => {
+ExprKind::Lambda { args, body } => {
 return self.fold_lambda(node.location, *args, *body);
 }
-ast::ExprKind::ListComp { elt, generators } => {
+ExprKind::ListComp { elt, generators } => {
 return self.fold_listcomp(node.location, *elt, generators);
 }
 _ => fold::fold_expr(self, node)?,
 };
 let custom = match &expr.node {
-ast::ExprKind::Constant { value, .. } => {
+ExprKind::Constant { value, .. } => {
 Some(self.infer_constant(value, &expr.location)?)
 }
-ast::ExprKind::Name { id, .. } => {
+ExprKind::Name { id, .. } => {
 // the name `none` is special since it may have different types
 if id == &"none".into() {
 if let TypeEnum::TObj { params, .. } =
@@ -498,29 +498,29 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
 Some(self.infer_identifier(*id)?)
 }
 }
-ast::ExprKind::List { elts, .. } => Some(self.infer_list(elts)?),
-ast::ExprKind::Tuple { elts, .. } => Some(self.infer_tuple(elts)?),
-ast::ExprKind::Attribute { value, attr, ctx } => {
+ExprKind::List { elts, .. } => Some(self.infer_list(elts)?),
+ExprKind::Tuple { elts, .. } => Some(self.infer_tuple(elts)?),
+ExprKind::Attribute { value, attr, ctx } => {
 Some(self.infer_attribute(value, *attr, ctx)?)
 }
-ast::ExprKind::BoolOp { values, .. } => Some(self.infer_bool_ops(values)?),
-ast::ExprKind::BinOp { left, op, right } => {
+ExprKind::BoolOp { values, .. } => Some(self.infer_bool_ops(values)?),
+ExprKind::BinOp { left, op, right } => {
 Some(self.infer_bin_ops(expr.location, left, op, right, false)?)
 }
-ast::ExprKind::UnaryOp { op, operand } => Some(self.infer_unary_ops(op, operand)?),
-ast::ExprKind::Compare { left, ops, comparators } => {
+ExprKind::UnaryOp { op, operand } => Some(self.infer_unary_ops(op, operand)?),
+ExprKind::Compare { left, ops, comparators } => {
 Some(self.infer_compare(left, ops, comparators)?)
 }
-ast::ExprKind::Subscript { value, slice, ctx, .. } => {
+ExprKind::Subscript { value, slice, ctx, .. } => {
 Some(self.infer_subscript(value.as_ref(), slice.as_ref(), ctx)?)
 }
-ast::ExprKind::IfExp { test, body, orelse } => {
+ExprKind::IfExp { test, body, orelse } => {
 Some(self.infer_if_expr(test, body.as_ref(), orelse.as_ref())?)
 }
-ast::ExprKind::ListComp { .. }
-| ast::ExprKind::Lambda { .. }
-| ast::ExprKind::Call { .. } => expr.custom, // already computed
-ast::ExprKind::Slice { .. } => None, // we don't need it for slice
+ExprKind::ListComp { .. }
+| ExprKind::Lambda { .. }
+| ExprKind::Call { .. } => expr.custom, // already computed
+ExprKind::Slice { .. } => None, // we don't need it for slice
 _ => return report_error("not supported", expr.location),
 };
 Ok(ast::Expr { custom, location: expr.location, node: expr.node })
@@ -748,7 +748,7 @@ impl<'a> Inferencer<'a> {
 custom: Some(new_context.unifier.add_ty(TypeEnum::TList { ty: elt.custom.unwrap() })),
 node: ExprKind::ListComp {
 elt: Box::new(elt),
-generators: vec![ast::Comprehension {
+generators: vec![Comprehension {
 target: Box::new(target),
 iter: Box::new(iter),
 ifs,
@@ -1094,7 +1094,7 @@ impl<'a> Inferencer<'a> {
 ) -> InferenceResult {
 let ty = self.unifier.get_dummy_var().0;
 match &slice.node {
-ast::ExprKind::Slice { lower, upper, step } => {
+ExprKind::Slice { lower, upper, step } => {
 for v in [lower.as_ref(), upper.as_ref(), step.as_ref()].iter().flatten() {
 self.constrain(v.custom.unwrap(), self.primitives.int32, &v.location)?;
 }
@@ -1102,7 +1102,7 @@ impl<'a> Inferencer<'a> {
 self.constrain(value.custom.unwrap(), list, &value.location)?;
 Ok(list)
 }
-ast::ExprKind::Constant { value: ast::Constant::Int(val), .. } => {
+ExprKind::Constant { value: ast::Constant::Int(val), .. } => {
 // the index is a constant, so value can be a sequence.
 let ind: Option<i32> = (*val).try_into().ok();
 let ind = ind.ok_or_else(|| "Index must be int32".to_string())?;

View File

@@ -20,7 +20,7 @@ struct Resolver {
 impl SymbolResolver for Resolver {
 fn get_default_param_value(
 &self,
-_: &nac3parser::ast::Expr,
+_: &ast::Expr,
 ) -> Option<crate::symbol_resolver::SymbolValue> {
 unimplemented!()
 }
@@ -62,7 +62,7 @@ struct TestEnvironment {
 pub primitives: PrimitiveStore,
 pub id_to_name: HashMap<usize, StrRef>,
 pub identifier_mapping: HashMap<StrRef, Type>,
-pub virtual_checks: Vec<(Type, Type, nac3parser::ast::Location)>,
+pub virtual_checks: Vec<(Type, Type, Location)>,
 pub calls: HashMap<CodeLocation, CallId>,
 pub top_level: TopLevelContext,
 }

View File

@@ -641,7 +641,7 @@ impl Unifier {
 .get_intersection(a, b)
 .map_err(|_| TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None))?
 .unwrap();
-let range = if let TypeEnum::TVar { range, .. } = &*self.get_ty(intersection) {
+let range = if let TVar { range, .. } = &*self.get_ty(intersection) {
 range.clone()
 } else {
 unreachable!()
@@ -649,7 +649,7 @@
 self.unification_table.unify(a, b);
 self.unification_table.set_value(
 a,
-Rc::new(TypeEnum::TVar {
+Rc::new(TVar {
 id: name1.map_or(*id2, |_| *id),
 fields: new_fields,
 range,
@@ -1217,7 +1217,7 @@ impl Unifier {
 cache: &mut HashMap<Type, Option<Type>>,
 ) -> Option<Mapping<K>>
 where
-K: std::hash::Hash + std::cmp::Eq + std::clone::Clone,
+K: std::hash::Hash + Eq + Clone,
 {
 let mut map2 = None;
 for (k, v) in map.iter() {
@@ -1238,7 +1238,7 @@ impl Unifier {
 cache: &mut HashMap<Type, Option<Type>>,
 ) -> Option<Mapping<K, (Type, bool)>>
 where
-K: std::hash::Hash + std::cmp::Eq + std::clone::Clone,
+K: std::hash::Hash + Eq + Clone,
 {
 let mut map2 = None;
 for (k, (v, mutability)) in map.iter() {

View File

@@ -47,7 +47,7 @@ impl Unifier {
 fn map_eq<K>(&mut self, map1: &Mapping<K>, map2: &Mapping<K>) -> bool
 where
-K: std::hash::Hash + std::cmp::Eq + std::clone::Clone,
+K: std::hash::Hash + Eq + Clone,
 {
 if map1.len() != map2.len() {
 return false;
@@ -62,7 +62,7 @@ impl Unifier {
 fn map_eq2<K>(&mut self, map1: &Mapping<K, RecordField>, map2: &Mapping<K, RecordField>) -> bool
 where
-K: std::hash::Hash + std::cmp::Eq + std::clone::Clone,
+K: std::hash::Hash + Eq + Clone,
 {
 if map1.len() != map2.len() {
 return false;

View File

@@ -1,7 +1,7 @@
 use dwarf::*;
 use elf::*;
 use std::collections::HashMap;
-use std::{convert, mem, ptr, slice, str};
+use std::{mem, ptr, slice, str};
 extern crate byteorder;
 use byteorder::{ByteOrder, LittleEndian};
@@ -21,7 +21,7 @@ pub enum Error {
 Lookup(&'static str),
 }
-impl convert::From<&'static str> for Error {
+impl From<&'static str> for Error {
 fn from(desc: &'static str) -> Error {
 Error::Parsing(desc)
 }

View File

@@ -486,8 +486,8 @@ where
 }
 }
 match p {
-0xD800..=0xDFFF => Ok(std::char::REPLACEMENT_CHARACTER),
-_ => std::char::from_u32(p).ok_or(unicode_error),
+0xD800..=0xDFFF => Ok(char::REPLACEMENT_CHARACTER),
+_ => char::from_u32(p).ok_or(unicode_error),
 }
 }