add file name to AST node location

escape-analysis
ychenfo 2021-12-28 01:28:55 +08:00
parent 1bd966965e
commit 88f0da7bdd
7 changed files with 45 additions and 36 deletions

View File

@@ -97,10 +97,10 @@ impl Nac3 {
             ))
         })?;
-        let source = fs::read_to_string(source_file).map_err(|e| {
+        let source = fs::read_to_string(&source_file).map_err(|e| {
             exceptions::PyIOError::new_err(format!("failed to read input file: {}", e))
         })?;
-        let parser_result = parser::parse_program(&source)
+        let parser_result = parser::parse_program(&source, source_file.into())
             .map_err(|e| exceptions::PySyntaxError::new_err(format!("parse error: {}", e)))?;
         for mut stmt in parser_result.into_iter() {
@@ -476,7 +476,7 @@ impl Nac3 {
                 arg_names.join(", ")
             )
         };
-        let mut synthesized = parse_program(&synthesized).unwrap();
+        let mut synthesized = parse_program(&synthesized, Default::default()).unwrap();
         let resolver = Arc::new(Resolver(Arc::new(InnerResolver {
             id_to_type: self.builtins_ty.clone().into(),
             id_to_def: self.builtins_def.clone().into(),
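A condensed sketch of the two changed call sites above (error handling elided; illustrative only): `read_to_string` now borrows `source_file` because the later `.into()` (via `From<String> for FileName`) consumes the `String`, while the synthesized module has no on-disk file and therefore passes `Default::default()`, which renders as "unknown".

    // Illustrative only; the real code maps errors into Python exceptions.
    let source = fs::read_to_string(&source_file).unwrap();                          // borrow: the String is still needed below
    let parser_result = parser::parse_program(&source, source_file.into()).unwrap(); // From<String> for FileName consumes it
    let synthesized = parse_program(&synthesized_code, Default::default()).unwrap(); // no file: falls back to "unknown"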

View File

@@ -9,6 +9,6 @@ mod impls;
 mod location;

 pub use ast_gen::*;
-pub use location::Location;
+pub use location::{Location, FileName};

 pub type Suite<U = ()> = Vec<Stmt<U>>;

View File

@@ -1,17 +1,32 @@
 //! Datatypes to support source location information.
+use crate::ast_gen::StrRef;
 use std::fmt;

+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct FileName(StrRef);
+impl Default for FileName {
+    fn default() -> Self {
+        FileName("unknown".into())
+    }
+}
+
+impl From<String> for FileName {
+    fn from(s: String) -> Self {
+        FileName(s.into())
+    }
+}
+
 /// A location somewhere in the sourcecode.
 #[derive(Clone, Copy, Debug, Default, PartialEq)]
 pub struct Location {
     row: usize,
     column: usize,
+    file: FileName
 }

 impl fmt::Display for Location {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "line {} column {}", self.row, self.column)
+        write!(f, "{}: line {} column {}", self.file.0, self.row, self.column)
     }
 }
@@ -47,8 +62,8 @@ impl Location {
 }

 impl Location {
-    pub fn new(row: usize, column: usize) -> Self {
-        Location { row, column }
+    pub fn new(row: usize, column: usize, file: FileName) -> Self {
+        Location { row, column, file }
     }

     pub fn row(&self) -> usize {
View File

@@ -163,9 +163,8 @@ impl TopLevelComposer {
             ast::StmtKind::ClassDef { name: class_name, body, .. } => {
                 if self.keyword_list.contains(class_name) {
                     return Err(format!(
-                        "cannot use keyword `{}` as a class name ({} at {})",
+                        "cannot use keyword `{}` as a class name (at {})",
                         class_name,
-                        mod_path,
                         ast.location
                     ));
                 }
@@ -175,9 +174,8 @@ impl TopLevelComposer {
                     n
                 }) {
                     return Err(format!(
-                        "duplicate definition of class `{}` ({} at {})",
+                        "duplicate definition of class `{}` (at {})",
                         class_name,
-                        mod_path,
                         ast.location
                     ));
                 }
@@ -223,9 +221,8 @@ impl TopLevelComposer {
                         }
                         if self.keyword_list.contains(method_name) {
                             return Err(format!(
-                                "cannot use keyword `{}` as a method name ({} at {})",
+                                "cannot use keyword `{}` as a method name (at {})",
                                 method_name,
-                                mod_path,
                                 b.location
                             ));
                         }
@@ -242,9 +239,8 @@ impl TopLevelComposer {
                         };
                         if !defined_names.insert(global_class_method_name.clone()) {
                             return Err(format!(
-                                "class method `{}` defined twice ({} at {})",
+                                "class method `{}` defined twice (at {})",
                                 &global_class_method_name[mod_path.len()..],
-                                mod_path,
                                 b.location
                             ));
                         }
@@ -309,9 +305,8 @@ impl TopLevelComposer {
                 };
                 if !defined_names.insert(global_fun_name.clone()) {
                     return Err(format!(
-                        "top level function `{}` defined twice ({} at {})",
+                        "top level function `{}` defined twice (at {})",
                         &global_fun_name[mod_path.len()..],
-                        mod_path,
                         ast.location
                     ));
                 }
@@ -340,8 +335,7 @@ impl TopLevelComposer {
             }
             _ => Err(format!(
-                "registrations of constructs other than top level classes/functions are not supported ({} at {})",
-                mod_path,
+                "registrations of constructs other than top level classes/functions are not supported (at {})",
                 ast.location
             )),
         }
@@ -794,7 +788,7 @@ impl TopLevelComposer {
                                     &type_annotation,
                                     primitives_store,
                                     unifier
-                                ).map_err(|err| format!("{} at {}", err, x.location))?;
+                                ).map_err(|err| format!("{} (at {})", err, x.location))?;
                                 v
                             })
                         }
@@ -865,7 +859,7 @@ impl TopLevelComposer {
                 ));
                 unifier
                     .unify(*dummy_ty, function_ty)
-                    .map_err(|old| format!("{} at {}", old, function_ast.location))?;
+                    .map_err(|old| format!("{} (at {})", old, function_ast.location))?;
             } else {
                 unreachable!("must be both function");
             }
@@ -1028,7 +1022,7 @@ impl TopLevelComposer {
                             Some({
                                 let v = Self::parse_parameter_default_value(default, class_resolver)?;
                                 Self::check_default_param_type(&v, &type_ann, primitives, unifier)
-                                    .map_err(|err| format!("{} at {}", err, x.location))?;
+                                    .map_err(|err| format!("{} (at {})", err, x.location))?;
                                 v
                             })
                         }
@@ -1351,7 +1345,7 @@ impl TopLevelComposer {
             ));
             self.unifier
                 .unify(constructor.unwrap(), contor_type)
-                .map_err(|old| format!("{} at {}", old, ast.as_ref().unwrap().location))?;
+                .map_err(|old| format!("{} (at {})", old, ast.as_ref().unwrap().location))?;

             // class field instantiation check
             if let (Some(init_id), false) = (init_id, fields.is_empty()) {
@@ -1568,7 +1562,7 @@ impl TopLevelComposer {
                 &mut |id| format!("tvar{}", id),
             );
             return Err(format!(
-                "expected return type of `{}` in function `{}` at {}",
+                "expected return type of `{}` in function `{}` (at {})",
                 ret_str,
                 name,
                 ast.as_ref().unwrap().location
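Since the location's `Display` output (see location.rs above) already names the file, the separate `mod_path` argument in these messages becomes redundant and is dropped. A duplicate class definition, for example, would now be reported along the lines of:

    duplicate definition of class `Foo` (at demo.py: line 12 column 1)

(The class name, file, and coordinates here are hypothetical.)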

View File

@@ -3,7 +3,7 @@
 //! This means source code is translated into separate tokens.
 pub use super::token::Tok;
-use crate::ast::Location;
+use crate::ast::{Location, FileName};
 use crate::error::{LexicalError, LexicalErrorType};
 use std::char;
 use std::cmp::Ordering;
@@ -111,8 +111,8 @@ pub type Spanned = (Location, Tok, Location);
 pub type LexResult = Result<Spanned, LexicalError>;

 #[inline]
-pub fn make_tokenizer(source: &str) -> impl Iterator<Item = LexResult> + '_ {
-    make_tokenizer_located(source, Location::new(0, 0))
+pub fn make_tokenizer(source: &str, file: FileName) -> impl Iterator<Item = LexResult> + '_ {
+    make_tokenizer_located(source, Location::new(0, 0, file))
 }

 pub fn make_tokenizer_located(

View File

@@ -7,7 +7,7 @@
 use std::iter;

-use crate::ast;
+use crate::ast::{self, FileName};
 use crate::error::ParseError;
 use crate::lexer;
 pub use crate::mode::Mode;
@@ -20,8 +20,8 @@ use crate::python;
 */

 /// Parse a full python program, containing usually multiple lines.
-pub fn parse_program(source: &str) -> Result<ast::Suite, ParseError> {
-    parse(source, Mode::Module).map(|top| match top {
+pub fn parse_program(source: &str, file: FileName) -> Result<ast::Suite, ParseError> {
+    parse(source, Mode::Module, file).map(|top| match top {
         ast::Mod::Module { body, .. } => body,
         _ => unreachable!(),
     })
@@ -63,15 +63,15 @@ pub fn parse_program(source: &str) -> Result<ast::Suite, ParseError> {
 ///
 /// ```
 pub fn parse_expression(source: &str) -> Result<ast::Expr, ParseError> {
-    parse(source, Mode::Expression).map(|top| match top {
+    parse(source, Mode::Expression, Default::default()).map(|top| match top {
         ast::Mod::Expression { body } => *body,
         _ => unreachable!(),
     })
 }

 // Parse a given source code
-pub fn parse(source: &str, mode: Mode) -> Result<ast::Mod, ParseError> {
-    let lxr = lexer::make_tokenizer(source);
+pub fn parse(source: &str, mode: Mode, file: FileName) -> Result<ast::Mod, ParseError> {
+    let lxr = lexer::make_tokenizer(source, file);
     let marker_token = (Default::default(), mode.to_marker(), Default::default());
     let tokenizer = iter::once(Ok(marker_token)).chain(lxr);
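A short sketch of the updated entry points (import path assumed to be `nac3parser::parser`): `parse_program` and `parse` now take the file name, while `parse_expression` keeps its old signature and tags expressions with the default "unknown" file.

    use nac3parser::parser::{parse_expression, parse_program};

    // Locations inside `suite` carry "demo.py".
    let suite = parse_program("x = 1\n", "demo.py".to_string().into()).unwrap();
    // Locations inside `expr` carry the default file name "unknown".
    let expr = parse_expression("1 + 2").unwrap();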

View File

@@ -35,7 +35,7 @@ fn main() {
     Target::initialize_all(&InitializationConfig::default());

-    let program = match fs::read_to_string(demo_name + ".py") {
+    let program = match fs::read_to_string(demo_name.clone() + ".py") {
         Ok(program) => program,
         Err(err) => {
             println!("Cannot open input file: {}", err);
@@ -64,7 +64,7 @@ fn main() {
         setup_time.duration_since(start).unwrap().as_millis()
     );

-    let parser_result = parser::parse_program(&program).unwrap();
+    let parser_result = parser::parse_program(&program, format!("{}.py", demo_name).into()).unwrap();
     let parse_time = SystemTime::now();
     println!(
         "parse time: {}ms",