Compare commits
No commits in common. "master" and "master" have entirely different histories.
@@ -1,27 +0,0 @@
-from min_artiq import kernel, KernelInvariant, nac3
-import min_artiq as artiq
-
-
-@nac3
-class Demo:
-    core: KernelInvariant[artiq.Core]
-    led0: KernelInvariant[artiq.TTLOut]
-    led1: KernelInvariant[artiq.TTLOut]
-
-    def __init__(self):
-        self.core = artiq.Core()
-        self.led0 = artiq.TTLOut(self.core, 18)
-        self.led1 = artiq.TTLOut(self.core, 19)
-
-    @kernel
-    def run(self):
-        self.core.reset()
-        while True:
-            with artiq.parallel:
-                self.led0.pulse(100.*artiq.ms)
-                self.led1.pulse(100.*artiq.ms)
-            self.core.delay(100.*artiq.ms)
-
-
-if __name__ == "__main__":
-    Demo().run()
@@ -1,9 +1,9 @@
 from inspect import getfullargspec
 from functools import wraps
+from types import SimpleNamespace
+from numpy import int32, int64
+from typing import Generic, TypeVar
 from math import floor, ceil
-from numpy import int32, int64, uint32, uint64, float64, bool_, str_, ndarray
-from types import GenericAlias, ModuleType, SimpleNamespace
-from typing import _GenericAlias, Generic, TypeVar
 
 import nac3artiq
 
@@ -40,10 +40,10 @@ class Option(Generic[T]):
 
     def is_none(self):
        return self._nac3_option is None
 
     def is_some(self):
         return not self.is_none()
 
     def unwrap(self):
         if self.is_none():
             raise UnwrapNoneError()
@@ -54,7 +54,7 @@ class Option(Generic[T]):
             return "none"
         else:
             return "Some({})".format(repr(self._nac3_option))
 
     def __str__(self) -> str:
         if self.is_none():
             return "none"
@@ -85,46 +85,13 @@ def ceil64(x):
 import device_db
 core_arguments = device_db.device_db["core"]["arguments"]
 
-builtins = {
-    "int": int,
-    "float": float,
-    "bool": bool,
-    "str": str,
-    "list": list,
-    "tuple": tuple,
-    "Exception": Exception,
-
-    "types": {
-        "GenericAlias": GenericAlias,
-        "ModuleType": ModuleType,
-    },
-
-    "typing": {
-        "_GenericAlias": _GenericAlias,
-        "TypeVar": TypeVar,
-    },
-
-    "numpy": {
-        "int32": int32,
-        "int64": int64,
-        "uint32": uint32,
-        "uint64": uint64,
-        "float64": float64,
-        "bool_": bool_,
-        "str_": str_,
-        "ndarray": ndarray,
-    },
-
-    "artiq": {
-        "Kernel": Kernel,
-        "KernelInvariant": KernelInvariant,
-        "_ConstGenericMarker": _ConstGenericMarker,
-        "none": none,
-        "virtual": virtual,
-        "Option": Option,
-    },
+artiq_builtins = {
+    "none": none,
+    "virtual": virtual,
+    "_ConstGenericMarker": _ConstGenericMarker,
+    "Option": Option,
 }
-compiler = nac3artiq.NAC3(core_arguments["target"], builtins)
+compiler = nac3artiq.NAC3(core_arguments["target"], artiq_builtins)
 allow_registration = True
 # Delay NAC3 analysis until all referenced variables are supposed to exist on the CPython side.
 registered_functions = set()
@@ -185,9 +152,9 @@ def nac3(cls):
     return cls
 
 
-ms: KernelInvariant[float] = 1e-3
-us: KernelInvariant[float] = 1e-6
-ns: KernelInvariant[float] = 1e-9
+ms = 1e-3
+us = 1e-6
+ns = 1e-9
 
 @extern
 def rtio_init():
@@ -368,9 +335,9 @@ class UnwrapNoneError(Exception):
     """raised when unwrapping a none value"""
     artiq_builtin = True
 
-parallel: KernelInvariant[KernelContextManager] = KernelContextManager()
-legacy_parallel: KernelInvariant[KernelContextManager] = KernelContextManager()
-sequential: KernelInvariant[KernelContextManager] = KernelContextManager()
+parallel = KernelContextManager()
+legacy_parallel = KernelContextManager()
+sequential = KernelContextManager()
 
 special_ids = {
     "parallel": id(parallel),
@@ -1015,7 +1015,7 @@ pub fn attributes_writeback<'ctx>(
                     *field_ty,
                     ctx.build_gep_and_load(
                         obj.into_pointer_value(),
-                        &[zero, int32.const_int(index.unwrap() as u64, false)],
+                        &[zero, int32.const_int(index as u64, false)],
                         None,
                     ),
                 ));
@@ -1056,7 +1056,7 @@ pub fn attributes_writeback<'ctx>(
                     *field_ty,
                     ctx.build_gep_and_load(
                         obj.into_pointer_value(),
-                        &[zero, int32.const_int(index.unwrap() as u64, false)],
+                        &[zero, int32.const_int(index as u64, false)],
                         None,
                     ),
                 ));
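Note: the only textual change in the two hunks above is dropping `.unwrap()` on `index`. That follows from the `get_attr_index` hunk further down: one side of this comparison returns a bare `usize` index, the other wraps it in `Option<usize>` so a missing attribute can be handled instead of panicking. A minimal, self-contained sketch of that difference, using toy field lists rather than the real nac3core types:

fn attr_index_plain(fields: &[&str], attr: &str) -> usize {
    // Panics inside the helper if the attribute is unknown.
    fields.iter().position(|f| *f == attr).unwrap()
}

fn attr_index_optional(fields: &[&str], attr: &str) -> Option<usize> {
    // Lets each caller decide what a missing attribute means.
    fields.iter().position(|f| *f == attr)
}

fn main() {
    let fields = ["core", "led0", "led1"];
    // The plain variant can be indexed directly...
    let i = attr_index_plain(&fields, "led0");
    // ...while the Option variant forces an explicit unwrap (or fallback) at the call site,
    // which is exactly the `index.unwrap()` seen on one side of the diff.
    let j = attr_index_optional(&fields, "led0").unwrap();
    assert_eq!(i, j);
    println!("led0 is field #{i}");
}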
@@ -1,273 +0,0 @@
-use itertools::Itertools;
-
-use nac3core::{toplevel::TopLevelDef, typecheck::typedef::Unifier};
-
-use super::{InnerResolver, symbol_resolver::PyValueHandle};
-
-impl InnerResolver {
-    pub fn debug_str(&self, tld: Option<&[TopLevelDef]>, unifier: &Option<&mut Unifier>) -> String {
-        fn fmt_elems(elems: &str) -> String {
-            if elems.is_empty() { String::new() } else { format!("\n{elems}\n\t") }
-        }
-        fn stringify_pyvalue_handle(handle: &PyValueHandle) -> String {
-            format!("(id: {}, value: {})", handle.0, handle.1)
-        }
-        fn stringify_tld(tld: &TopLevelDef) -> String {
-            match tld {
-                TopLevelDef::Module { name, .. } => {
-                    format!("TopLevelDef::Module {{ name: {name} }}")
-                }
-                TopLevelDef::Class { name, .. } => {
-                    format!("TopLevelDef::Class {{ name: {name} }}")
-                }
-                TopLevelDef::Function { name, .. } => {
-                    format!("TopLevelDef::Function {{ name: {name} }}")
-                }
-                TopLevelDef::Variable { name, .. } => {
-                    format!("TopLevelDef::Variable {{ name: {name} }}")
-                }
-            }
-        }
-
-        let mut str = String::new();
-        str.push_str("nac3artiq::InnerResolver {");
-
-        {
-            let id_to_type = self.id_to_type.read();
-            str.push_str(
-                format!(
-                    "\n\tid_to_type: {{{}}},",
-                    fmt_elems(
-                        id_to_type
-                            .iter()
-                            .sorted_by_cached_key(|(k, _)| k.to_string())
-                            .map(|(k, v)| {
-                                let ty_str = unifier.as_ref().map_or_else(
-                                    || format!("{v:?}"),
-                                    |unifier| unifier.stringify(*v),
-                                );
-                                format!("\t\t{k} -> {ty_str}")
-                            })
-                            .join(",\n")
-                            .as_str()
-                    ),
-                )
-                .as_str(),
-            );
-        }
-
-        {
-            let id_to_def = self.id_to_def.read();
-            str.push_str(
-                format!(
-                    "\n\tid_to_def: {{{}}},",
-                    fmt_elems(
-                        id_to_def
-                            .iter()
-                            .sorted_by_cached_key(|(k, _)| k.to_string())
-                            .map(|(k, v)| {
-                                let tld_str = tld.map_or_else(
-                                    || format!("{v:?}"),
-                                    |tlds| stringify_tld(&tlds[v.0]),
-                                );
-                                format!("\t\t{k} -> {tld_str}")
-                            })
-                            .join(",\n")
-                            .as_str()
-                    )
-                )
-                .as_str(),
-            );
-        }
-
-        {
-            let id_to_pyval = self.id_to_pyval.read();
-            str.push_str(
-                format!(
-                    "\n\tid_to_pyval: {{{}}},",
-                    fmt_elems(
-                        id_to_pyval
-                            .iter()
-                            .sorted_by_cached_key(|(k, _)| k.to_string())
-                            .map(|(k, v)| { format!("\t\t{k} -> {}", stringify_pyvalue_handle(v)) })
-                            .join(",\n")
-                            .as_str()
-                    )
-                )
-                .as_str(),
-            );
-        }
-
-        {
-            let id_to_primitive = self.id_to_primitive.read();
-            str.push_str(
-                format!(
-                    "\n\tid_to_primitive: {{{}}},",
-                    fmt_elems(
-                        id_to_primitive
-                            .iter()
-                            .sorted_by_key(|(k, _)| *k)
-                            .map(|(k, v)| { format!("\t\t{k} -> {v:?}") })
-                            .join(",\n")
-                            .as_str()
-                    )
-                )
-                .as_str(),
-            );
-        }
-
-        {
-            let field_to_val = self.field_to_val.read();
-            str.push_str(
-                format!(
-                    "\n\tfield_to_val: {{{}}},",
-                    fmt_elems(
-                        field_to_val
-                            .iter()
-                            .sorted_by_key(|((id, _), _)| *id)
-                            .map(|((id, name), pyval)| {
-                                format!(
-                                    "\t\t({id}, {name}) -> {}",
-                                    pyval.as_ref().map_or_else(
-                                        || String::from("None"),
-                                        |pyval| format!(
-                                            "Some({})",
-                                            stringify_pyvalue_handle(pyval)
-                                        )
-                                    )
-                                )
-                            })
-                            .join(",\n")
-                            .as_str()
-                    )
-                )
-                .as_str(),
-            );
-        }
-
-        {
-            let global_value_ids = self.global_value_ids.read();
-            str.push_str(
-                format!(
-                    "\n\tglobal_value_ids: {{{}}},",
-                    fmt_elems(
-                        global_value_ids
-                            .iter()
-                            .sorted_by_key(|(k, _)| *k)
-                            .map(|(k, v)| format!("\t\t{k} -> {v}"))
-                            .join(",\n")
-                            .as_str()
-                    )
-                )
-                .as_str(),
-            );
-        }
-
-        {
-            let pyid_to_def = self.pyid_to_def.read();
-            str.push_str(
-                format!(
-                    "\n\tpyid_to_def: {{{}}},",
-                    fmt_elems(
-                        pyid_to_def
-                            .iter()
-                            .sorted_by_key(|(k, _)| *k)
-                            .map(|(k, v)| {
-                                let tld_str = tld.map_or_else(
-                                    || format!("{v:?}"),
-                                    |tlds| stringify_tld(&tlds[v.0]),
-                                );
-                                format!("\t\t{k} -> {tld_str}")
-                            })
-                            .join(",\n")
-                            .as_str()
-                    )
-                )
-                .as_str(),
-            );
-        }
-
-        {
-            let pyid_to_type = self.pyid_to_type.read();
-            str.push_str(
-                format!(
-                    "\n\tpyid_to_type: {{{}}},",
-                    fmt_elems(
-                        pyid_to_type
-                            .iter()
-                            .sorted_by_key(|(k, _)| *k)
-                            .map(|(k, v)| {
-                                let ty_str = unifier.as_ref().map_or_else(
-                                    || format!("{v:?}"),
-                                    |unifier| unifier.stringify(*v),
-                                );
-                                format!("\t\t{k} -> {ty_str}")
-                            })
-                            .join(",\n")
-                            .as_str()
-                    )
-                )
-                .as_str(),
-            );
-        }
-
-        {
-            let string_store = self.string_store.read();
-            str.push_str(
-                format!(
-                    "\n\tstring_store: {{{}}},",
-                    fmt_elems(
-                        string_store
-                            .iter()
-                            .sorted_by_key(|(k, _)| *k)
-                            .map(|(k, v)| format!("\t\t{k} -> {v}"))
-                            .join(",\n")
-                            .as_str()
-                    )
-                )
-                .as_str(),
-            );
-        }
-
-        {
-            let exception_ids = self.exception_ids.read();
-            str.push_str(
-                format!(
-                    "\n\texception_ids: {{{}}},",
-                    fmt_elems(
-                        exception_ids
-                            .iter()
-                            .sorted_by_key(|(k, _)| *k)
-                            .map(|(k, v)| format!("\t\t{k} -> {v}"))
-                            .join(",\n")
-                            .as_str()
-                    )
-                )
-                .as_str(),
-            );
-        }
-
-        let name_to_pyid = &self.name_to_pyid;
-        str.push_str(
-            format!(
-                "\n\tname_to_pyid: {{{}}},",
-                fmt_elems(
-                    name_to_pyid
-                        .iter()
-                        .sorted_by_cached_key(|(k, _)| k.to_string())
-                        .map(|(k, v)| format!("\t\t{k} -> {v}"))
-                        .join(",\n")
-                        .as_str()
-                )
-            )
-            .as_str(),
-        );
-
-        let module = &self.module;
-        str.push_str(format!("\n\tmodule: {module}").as_str());
-
-        str.push_str("\n}");
-
-        str
-    }
-}
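Note: the removed file above is a single `debug_str` helper that dumps every table in `InnerResolver`; the matching `impl Debug` (removed further down in symbol_resolver.rs) just forwards to it. A minimal sketch of that "Debug delegates to a verbose dump" pattern, with made-up fields rather than the real resolver state:

use std::collections::HashMap;
use std::fmt;

struct Resolver {
    id_to_type: HashMap<String, String>,
}

impl Resolver {
    // Human-readable dump; the real debug_str additionally takes the
    // top-level definitions and a unifier, which this sketch omits.
    fn debug_str(&self) -> String {
        let mut s = String::from("Resolver {");
        let mut entries: Vec<_> = self.id_to_type.iter().collect();
        entries.sort_by(|a, b| a.0.cmp(b.0));
        for (k, v) in entries {
            s.push_str(&format!("\n\t{k} -> {v},"));
        }
        s.push_str("\n}");
        s
    }
}

impl fmt::Debug for Resolver {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug simply forwards to the dump above.
        write!(f, "{}", self.debug_str())
    }
}

fn main() {
    let mut r = Resolver { id_to_type: HashMap::new() };
    r.id_to_type.insert("x".into(), "int32".into());
    println!("{r:?}");
}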
@@ -10,11 +10,9 @@
 )]
 
 use std::{
-    cell::LazyCell,
     collections::{HashMap, HashSet},
     fs,
     io::Write,
-    path::Path,
     process::Command,
     rc::Rc,
     sync::Arc,
@@ -68,13 +66,9 @@ use symbol_resolver::{DeferredEvaluationStore, InnerResolver, PythonHelper, Reso
 use timeline::TimeFns;
 
 mod codegen;
-mod debug;
 mod symbol_resolver;
 mod timeline;
 
-const ENV_NAC3_EMIT_LLVM_BC: &str = "NAC3_EMIT_LLVM_BC";
-const ENV_NAC3_EMIT_LLVM_LL: &str = "NAC3_EMIT_LLVM_LL";
-
 #[derive(PartialEq, Clone, Copy)]
 enum Isa {
     Host,
@@ -166,8 +160,6 @@ pub struct PrimitivePythonId {
     virtual_id: u64,
     option: u64,
     module: u64,
-    kernel: u64,
-    kernel_invariant: u64,
 }
 
 #[derive(Clone, Default)]
@@ -236,17 +228,6 @@ impl Nac3 {
         let parser_result = parse_program(&source, source_file.into())
             .map_err(|e| exceptions::PySyntaxError::new_err(format!("parse error: {e}")))?;
 
-        let id_fn = LazyCell::new(|| {
-            Python::with_gil(|py| {
-                PyModule::import(py, "builtins").unwrap().getattr("id").unwrap().unbind()
-            })
-        });
-        let get_type_hints_fn = LazyCell::new(|| {
-            Python::with_gil(|py| {
-                PyModule::import(py, "typing").unwrap().getattr("get_type_hints").unwrap().unbind()
-            })
-        });
-
         for mut stmt in parser_result {
             let include = match stmt.node {
                 StmtKind::ClassDef { ref decorator_list, ref mut body, ref mut bases, .. } => {
@@ -263,6 +244,7 @@ impl Nac3 {
                     // Drop unregistered (i.e. host-only) base classes.
                     bases.retain(|base| {
                         Python::with_gil(|py| -> PyResult<bool> {
+                            let id_fn = PyModule::import(py, "builtins")?.getattr("id")?;
                             match &base.node {
                                 ExprKind::Name { id, .. } => {
                                     if *id == "Exception".into() {
@@ -270,8 +252,7 @@ impl Nac3 {
                                     } else {
                                         let base_obj =
                                             module.bind(py).getattr(id.to_string().as_str())?;
-                                        let base_id =
-                                            id_fn.bind(py).call1((base_obj,))?.extract()?;
+                                        let base_id = id_fn.call1((base_obj,))?.extract()?;
                                         Ok(registered_class_ids.contains(&base_id))
                                     }
                                 }
@@ -304,28 +285,10 @@ impl Nac3 {
                         }
                     })
                 }
-                // Allow global variable declaration with `Kernel` or `KernelInvariant` type annotation
-                StmtKind::AnnAssign { ref target, .. } => match &target.node {
-                    ExprKind::Name { id, .. } => Python::with_gil(|py| {
-                        let py_type_hints =
-                            get_type_hints_fn.bind(py).call1((module.bind(py),)).unwrap();
-                        let py_type_hints = py_type_hints.downcast::<PyDict>().unwrap();
-                        let var_type_hint =
-                            py_type_hints.get_item(id.to_string().as_str()).unwrap().unwrap();
-                        let var_type = var_type_hint.getattr_opt("__origin__").unwrap();
-                        if let Some(var_type) = var_type {
-                            let var_type_id = id_fn.bind(py).call1((var_type,)).unwrap();
-                            let var_type_id = var_type_id.extract::<u64>().unwrap();
-
-                            [self.primitive_ids.kernel, self.primitive_ids.kernel_invariant]
-                                .contains(&var_type_id)
-                        } else {
-                            false
-                        }
-                    }),
-                    _ => false,
-                },
+                // Allow global variable declaration with `Kernel` type annotation
+                StmtKind::AnnAssign { ref annotation, .. } => {
+                    matches!(&annotation.node, ExprKind::Subscript { value, .. } if matches!(&value.node, ExprKind::Name {id, ..} if id == &"Kernel".into()))
+                }
                 _ => false,
             };
 
@@ -605,10 +568,7 @@ impl Nac3 {
                             py,
                             (
                                 def_id.0.into_py_any(py)?,
-                                module
-                                    .bind(py)
-                                    .getattr(name.to_string().as_str())
-                                    .unwrap(),
+                                module.getattr(py, name.to_string().as_str()).unwrap(),
                             ),
                         )
                         .unwrap();
@@ -633,7 +593,7 @@ impl Nac3 {
                 }
                 StmtKind::ClassDef { name, body, .. } => {
                     let class_name = name.to_string();
-                    let class_obj = module.bind(py).getattr(class_name.as_str()).unwrap();
+                    let class_obj = Arc::new(module.getattr(py, class_name.as_str()).unwrap());
                     for stmt in body {
                         if let StmtKind::FunctionDef { name, decorator_list, .. } = &stmt.node {
                             for decorator in decorator_list {
@@ -805,7 +765,9 @@ impl Nac3 {
                                     py,
                                     (
                                         id.0.into_py_any(py)?,
-                                        class_def.getattr(name.to_string().as_str()).unwrap(),
+                                        class_def
+                                            .getattr(py, name.to_string().as_str())
+                                            .unwrap(),
                                     ),
                                 )
                                 .unwrap();
@@ -939,18 +901,6 @@ impl Nac3 {
 
         embedding_map.setattr("expects_return", has_return).unwrap();
 
-        let emit_llvm_bc = std::env::var(ENV_NAC3_EMIT_LLVM_BC).is_ok();
-        let emit_llvm_ll = std::env::var(ENV_NAC3_EMIT_LLVM_LL).is_ok();
-
-        let emit_llvm = |module: &Module<'_>, filename: &str| {
-            if emit_llvm_bc {
-                module.write_bitcode_to_path(Path::new(format!("{filename}.bc").as_str()));
-            }
-            if emit_llvm_ll {
-                module.print_to_file(Path::new(format!("{filename}.ll").as_str())).unwrap();
-            }
-        };
-
         // Link all modules into `main`.
         let buffers = membuffers.lock();
         let main = context
@@ -959,8 +909,6 @@ impl Nac3 {
                 "main",
             ))
             .unwrap();
-        emit_llvm(&main, "main");
-
         for buffer in buffers.iter().rev().skip(1) {
             let other = context
                 .create_module_from_ir(MemoryBuffer::create_from_memory_range(buffer, "main"))
@@ -968,10 +916,7 @@ impl Nac3 {
 
             main.link_in_module(other).map_err(|err| CompileError::new_err(err.to_string()))?;
         }
-        emit_llvm(&main, "main.merged");
-
         main.link_in_module(irrt).map_err(|err| CompileError::new_err(err.to_string()))?;
-        emit_llvm(&main, "main.fat");
 
         let mut function_iter = main.get_first_function();
         while let Some(func) = function_iter {
@@ -991,8 +936,6 @@ impl Nac3 {
             global_option = global.get_next_global();
         }
 
-        emit_llvm(&main, "main.pre-opt");
-
         let target_machine = self
             .llvm_options
             .target
@@ -1007,8 +950,6 @@ impl Nac3 {
             panic!("Failed to run optimization for module `main`: {}", err.to_string());
         }
 
-        emit_llvm(&main, "main.post-opt");
-
         Python::with_gil(|py| {
             let string_store = self.string_store.read();
             let mut string_store_vec = string_store.iter().collect::<Vec<_>>();
@@ -1192,59 +1133,42 @@ impl Nac3 {
 
             let builtins_mod = PyModule::import(py, "builtins").unwrap();
             let id_fn = builtins_mod.getattr("id").unwrap();
+            let numpy_mod = PyModule::import(py, "numpy").unwrap();
+            let typing_mod = PyModule::import(py, "typing").unwrap();
+            let types_mod = PyModule::import(py, "types").unwrap();
 
             let get_id = |x: &Bound<PyAny>| id_fn.call1((x,)).and_then(|id| id.extract()).unwrap();
-            let get_artiq_builtin = |mod_name: Option<&str>, name: &str| -> Bound<PyAny> {
-                if let Some(mod_name) = mod_name {
-                    artiq_builtins
-                        .get_item(mod_name)
-                        .unwrap()
-                        .unwrap_or_else(|| {
-                            panic!("no module key '{mod_name}' present in artiq_builtins")
-                        })
-                        .downcast::<PyDict>()
-                        .unwrap()
-                        .get_item(name)
-                        .unwrap()
-                        .unwrap_or_else(|| {
-                            panic!("no key '{name}' present in artiq_builtins.{mod_name}")
-                        })
-                } else {
-                    artiq_builtins
-                        .get_item(name)
-                        .unwrap()
-                        .unwrap_or_else(|| panic!("no key '{name}' present in artiq_builtins"))
-                }
+            let get_attr_id = |obj: &Bound<PyModule>, attr| {
+                id_fn.call1((obj.getattr(attr).unwrap(),)).unwrap().extract().unwrap()
             };
 
             let primitive_ids = PrimitivePythonId {
-                virtual_id: get_id(&get_artiq_builtin(Some("artiq"), "virtual")),
+                virtual_id: get_id(&artiq_builtins.get_item("virtual").ok().flatten().unwrap()),
                 generic_alias: (
-                    get_id(&get_artiq_builtin(Some("typing"), "_GenericAlias")),
-                    get_id(&get_artiq_builtin(Some("types"), "GenericAlias")),
+                    get_attr_id(&typing_mod, "_GenericAlias"),
+                    get_attr_id(&types_mod, "GenericAlias"),
                 ),
-                none: get_id(&get_artiq_builtin(Some("artiq"), "none")),
-                typevar: get_id(&get_artiq_builtin(Some("typing"), "TypeVar")),
-                const_generic_marker: get_id(&get_artiq_builtin(Some("artiq"), "_ConstGenericMarker")),
-                int: get_id(&get_artiq_builtin(None, "int")),
-                int32: get_id(&get_artiq_builtin(Some("numpy"), "int32")),
-                int64: get_id(&get_artiq_builtin(Some("numpy"), "int64")),
-                uint32: get_id(&get_artiq_builtin(Some("numpy"), "uint32")),
-                uint64: get_id(&get_artiq_builtin(Some("numpy"), "uint64")),
-                bool: get_id(&get_artiq_builtin(None, "bool")),
-                np_bool_: get_id(&get_artiq_builtin(Some("numpy"), "bool_")),
-                string: get_id(&get_artiq_builtin(None, "str")),
-                np_str_: get_id(&get_artiq_builtin(Some("numpy"), "str_")),
-                float: get_id(&get_artiq_builtin(None, "float")),
-                float64: get_id(&get_artiq_builtin(Some("numpy"), "float64")),
-                list: get_id(&get_artiq_builtin(None, "list")),
-                ndarray: get_id(&get_artiq_builtin(Some("numpy"), "ndarray")),
-                tuple: get_id(&get_artiq_builtin(None, "tuple")),
-                exception: get_id(&get_artiq_builtin(None, "Exception")),
-                option: get_id(&get_artiq_builtin(Some("artiq"), "Option")),
-                module: get_id(&get_artiq_builtin(Some("types"), "ModuleType")),
-                kernel: get_id(&get_artiq_builtin(Some("artiq"), "Kernel")),
-                kernel_invariant: get_id(&get_artiq_builtin(Some("artiq"), "KernelInvariant")),
+                none: get_id(&artiq_builtins.get_item("none").ok().flatten().unwrap()),
+                typevar: get_attr_id(&typing_mod, "TypeVar"),
+                const_generic_marker: get_id(
+                    &artiq_builtins.get_item("_ConstGenericMarker").ok().flatten().unwrap(),
+                ),
+                int: get_attr_id(&builtins_mod, "int"),
+                int32: get_attr_id(&numpy_mod, "int32"),
+                int64: get_attr_id(&numpy_mod, "int64"),
+                uint32: get_attr_id(&numpy_mod, "uint32"),
+                uint64: get_attr_id(&numpy_mod, "uint64"),
+                bool: get_attr_id(&builtins_mod, "bool"),
+                np_bool_: get_attr_id(&numpy_mod, "bool_"),
+                string: get_attr_id(&builtins_mod, "str"),
+                np_str_: get_attr_id(&numpy_mod, "str_"),
+                float: get_attr_id(&builtins_mod, "float"),
+                float64: get_attr_id(&numpy_mod, "float64"),
+                list: get_attr_id(&builtins_mod, "list"),
+                ndarray: get_attr_id(&numpy_mod, "ndarray"),
+                tuple: get_attr_id(&builtins_mod, "tuple"),
+                exception: get_attr_id(&builtins_mod, "Exception"),
+                option: get_id(&artiq_builtins.get_item("Option").ok().flatten().unwrap()),
+                module: get_attr_id(&types_mod, "ModuleType"),
             };
 
             let working_directory = tempfile::Builder::new().prefix("nac3-").tempdir().unwrap();
@@ -1390,8 +1314,9 @@ impl Nac3 {
         py: Python<'py>,
     ) -> PyResult<()> {
         let target_machine = self.get_llvm_target_machine();
-        let link_fn = |module: &Module| {
-            if self.isa == Isa::Host {
+        if self.isa == Isa::Host {
+            let link_fn = |module: &Module| {
                 let working_directory = self.working_directory.path().to_owned();
                 target_machine
                     .write_to_file(module, FileType::Object, &working_directory.join("module.o"))
@@ -1401,7 +1326,11 @@ impl Nac3 {
                         working_directory.join("module.o").to_string_lossy().to_string(),
                     )?;
                 Ok(())
-            } else {
+            };
+
+            self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
+        } else {
+            let link_fn = |module: &Module| {
                 let object_mem = target_machine
                     .write_to_memory_buffer(module, FileType::Object)
                     .expect("couldn't write module to object file buffer");
@@ -1415,10 +1344,10 @@ impl Nac3 {
                 } else {
                     Err(CompileError::new_err("linker failed to process object file"))
                 }
-            }
-        };
+            };
 
         self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
+        }
     }
 
     fn compile_method_to_mem<'py>(
@@ -1430,8 +1359,9 @@ impl Nac3 {
         py: Python<'py>,
     ) -> PyResult<PyObject> {
         let target_machine = self.get_llvm_target_machine();
-        let link_fn = |module: &Module| {
-            if self.isa == Isa::Host {
+        if self.isa == Isa::Host {
+            let link_fn = |module: &Module| {
                 let working_directory = self.working_directory.path().to_owned();
                 target_machine
                     .write_to_file(module, FileType::Object, &working_directory.join("module.o"))
@@ -1445,7 +1375,11 @@ impl Nac3 {
                     )?;
 
                 Ok(PyBytes::new(py, &fs::read(filename).unwrap()).into())
-            } else {
+            };
+
+            self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
+        } else {
+            let link_fn = |module: &Module| {
                 let object_mem = target_machine
                     .write_to_memory_buffer(module, FileType::Object)
                     .expect("couldn't write module to object file buffer");
@@ -1454,10 +1388,10 @@ impl Nac3 {
                 } else {
                     Err(CompileError::new_err("linker failed to process object file"))
                 }
-            }
-        };
+            };
 
         self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
+        }
     }
 }
 
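Note: the removed `id_fn` / `get_type_hints_fn` above cache Python callables in `std::cell::LazyCell`, so each is imported at most once per registration pass; the other branch re-imports `builtins.id` on every use instead. A self-contained sketch of that lazy-initialisation pattern (no PyO3 involved, just an expensive closure), assuming Rust 1.80+ where `LazyCell` is stable:

use std::cell::LazyCell;

fn main() {
    // The closure only runs the first time the cell is dereferenced.
    let expensive = LazyCell::new(|| {
        println!("initialising once");
        (0u64..1_000).sum::<u64>()
    });

    // Nothing has been computed yet; the first use triggers the closure,
    // later uses reuse the cached value.
    println!("first use:  {}", *expensive);
    println!("second use: {}", *expensive);
}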
@@ -1,6 +1,5 @@
 use std::{
     collections::{HashMap, HashSet},
-    fmt::Debug,
     sync::{
         Arc,
         atomic::{AtomicBool, Ordering::Relaxed},
@@ -42,7 +41,6 @@ use nac3core::{
 
 use super::PrimitivePythonId;
 
-#[derive(Debug)]
 pub enum PrimitiveValue {
     I32(i32),
     I64(i64),
@@ -75,10 +73,10 @@ impl DeferredEvaluationStore {
 
 /// A class field as stored in the [`InnerResolver`], represented by the ID and name of the
 /// associated [`PythonValue`].
-pub(crate) type ResolverField = (u64, StrRef);
+type ResolverField = (u64, StrRef);
 
 /// A value as stored in Python, represented by the `id()` and [`PyObject`] of the value.
-pub(crate) type PyValueHandle = (u64, Arc<PyObject>);
+type PyValueHandle = (u64, Arc<PyObject>);
 
 pub struct InnerResolver {
     pub id_to_type: RwLock<HashMap<StrRef, Type>>,
@@ -99,13 +97,6 @@ pub struct InnerResolver {
     pub module: Arc<PyObject>,
 }
 
-impl Debug for InnerResolver {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.debug_str(None, &None))
-    }
-}
-
-#[derive(Debug)]
 pub struct Resolver(pub Arc<InnerResolver>);
 
 #[derive(Clone)]
@@ -206,65 +197,36 @@ impl StaticValue for PythonValue {
             .unwrap_or_else(|| {
                 Python::with_gil(|py| -> PyResult<Option<PyValueHandle>> {
                     let helper = &self.resolver.helper;
-                    let id = helper.id_fn.bind(py).call1((&*self.value,))?.extract::<u64>()?;
                     let ty = helper.type_fn.bind(py).call1((&*self.value,))?;
                     let ty_id: u64 = helper.id_fn.bind(py).call1((ty,))?.extract()?;
 
                     // for optimizing unwrap KernelInvariant
                     if ty_id == self.resolver.primitive_ids.option && name == "_nac3_option".into() {
-                        let obj = self.value.bind(py).getattr(name.to_string().as_str())?;
-                        let id = self.resolver.helper.id_fn.bind(py).call1((&obj,))?.extract()?;
-                        let obj = Arc::new(obj.into_py_any(py)?);
+                        let obj = Arc::new(self.value.getattr(py, name.to_string().as_str())?);
+                        let id = self.resolver.helper.id_fn.bind(py).call1((&*obj,))?.extract()?;
                         return if self.id == self.resolver.primitive_ids.none {
                             Ok(None)
                         } else {
                             Ok(Some((id, obj)))
                         };
                     }
-                    let result = if let Some(def_id) =
-                        self.resolver.pyid_to_def.read().get(&ty_id).copied()
-                    {
-                        let mut mutable = true;
-                        let defs = ctx.top_level.definitions.read();
-                        if let TopLevelDef::Class { fields, .. } = &*defs[def_id.0].read() {
-                            for (field_name, _, is_mutable) in fields {
-                                if field_name == &name {
-                                    mutable = *is_mutable;
-                                    break;
-                                }
+                    let def_id = { *self.resolver.pyid_to_def.read().get(&ty_id).unwrap() };
+                    let mut mutable = true;
+                    let defs = ctx.top_level.definitions.read();
+                    if let TopLevelDef::Class { fields, .. } = &*defs[def_id.0].read() {
+                        for (field_name, _, is_mutable) in fields {
+                            if field_name == &name {
+                                mutable = *is_mutable;
+                                break;
                             }
                         }
-                        if mutable {
-                            None
-                        } else {
-                            let obj = self.value.bind(py).getattr(name.to_string().as_str())?;
-                            let id = self.resolver.helper.id_fn.bind(py).call1((&obj,))?.extract()?;
-                            let obj = Arc::new(obj.into_py_any(py)?);
-                            Some((id, obj))
-                        }
-                    } else if let Some(def_id) = self.resolver.pyid_to_def.read().get(&id).copied() {
-                        // Check if self.value is a module
-                        let in_mod_ctx = ctx
-                            .top_level
-                            .definitions
-                            .read()
-                            .get(def_id.0)
-                            .is_some_and(|def| matches!(&*def.read(), TopLevelDef::Module { .. }));
-
-                        if in_mod_ctx {
-                            let obj = self.value.bind(py).getattr(name.to_string().as_str())?;
-                            let id = self.resolver.helper.id_fn.bind(py).call1((&obj,))?.extract()?;
-                            let obj = Arc::new(obj.into_py_any(py)?);
-                            Some((id, obj))
-                        } else {
-                            None
-                        }
-                    } else {
+                    }
+                    let result = if mutable {
                         None
+                    } else {
+                        let obj = Arc::new(self.value.getattr(py, name.to_string().as_str())?);
+                        let id = self.resolver.helper.id_fn.bind(py).call1((&*obj,))?.extract()?;
+                        Some((id, obj))
                     };
 
                     self.resolver.field_to_val.write().insert((self.id, name), result.clone());
                     Ok(result)
                 })
@@ -1728,60 +1690,42 @@ impl SymbolResolver for Resolver {
     ) -> Option<ValueEnum<'ctx>> {
         if let Some(def_id) = self.0.id_to_def.read().get(&id) {
             let top_levels = ctx.top_level.definitions.read();
-            if let TopLevelDef::Variable { resolver, .. } = &*top_levels[def_id.0].read() {
+            if matches!(&*top_levels[def_id.0].read(), TopLevelDef::Variable { .. }) {
                 let module_val = &self.0.module;
-                let Ok((obj, idx)) = Python::with_gil(
-                    |py| -> PyResult<Result<(BasicValueEnum<'ctx>, Option<usize>), String>> {
-                        let module_val = (**module_val).bind(py);
+                let ret = Python::with_gil(|py| -> PyResult<Result<BasicValueEnum, String>> {
+                    let module_val = (**module_val).bind(py);
 
                     let ty = self.0.get_obj_type(
                         py,
                         module_val,
                         &mut ctx.unifier,
                         &top_levels,
                         &ctx.primitives,
                     )?;
                     if let Err(ty) = ty {
                         return Ok(Err(ty));
                     }
                     let ty = ty.unwrap();
-                        let obj =
-                            self.0.get_obj_value(py, module_val, ctx, generator, ty)?.unwrap();
-                        let (idx, _) = ctx.get_attr_index(ty, id);
-
-                        Ok(Ok((obj, idx)))
-                    },
-                )
-                .unwrap() else {
-                    return None;
-                };
-
-                let Some(idx) = idx else {
-                    // `idx` not found in the current resolver - try the resolver of the variable
-                    return resolver.as_ref().and_then(|resolver| {
-                        let resolver = &**resolver;
-
-                        // TODO: Can we assume that if get_identifier_def returns a result,
-                        // get_symbol_value will also return a value?
-                        resolver
-                            .get_identifier_def(id)
-                            .ok()
-                            .and_then(|_| resolver.get_symbol_value(id, ctx, generator))
-                    });
-                };
-
-                let ret = unsafe {
-                    ctx.builder.build_gep(
-                        obj.into_pointer_value(),
-                        &[
-                            ctx.ctx.i32_type().const_zero(),
-                            ctx.ctx.i32_type().const_int(idx as u64, false),
-                        ],
-                        id.to_string().as_str(),
-                    )
-                }
+                    let obj = self.0.get_obj_value(py, module_val, ctx, generator, ty)?.unwrap();
+                    let (idx, _) = ctx.get_attr_index(ty, id);
+                    let ret = unsafe {
+                        ctx.builder.build_gep(
+                            obj.into_pointer_value(),
+                            &[
+                                ctx.ctx.i32_type().const_zero(),
+                                ctx.ctx.i32_type().const_int(idx as u64, false),
+                            ],
+                            id.to_string().as_str(),
+                        )
+                    }
+                    .unwrap();
+                    Ok(Ok(ret.as_basic_value_enum()))
+                })
                 .unwrap();
-                return Some(ret.as_basic_value_enum().into());
+                if ret.is_err() {
+                    return None;
+                }
+                return Some(ret.unwrap().into());
             }
         }
 
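Note: on the left-hand side of the hunk above, `get_attr_index` yields an `Option<usize>`, and when the index is `None` the lookup is delegated to the resolver stored on the `TopLevelDef::Variable` itself. A toy sketch of that "try locally, else consult a fallback resolver" shape, with plain functions instead of the real resolver traits:

fn lookup(
    local: &[(&str, i32)],
    fallback: Option<&dyn Fn(&str) -> Option<i32>>,
    name: &str,
) -> Option<i32> {
    local
        .iter()
        .find(|(n, _)| *n == name)
        .map(|(_, v)| *v)
        // Only consult the fallback when the local lookup failed, mirroring the
        // `let Some(idx) = idx else { ... resolver ... }` path in the removed code.
        .or_else(|| fallback.and_then(|f| f(name)))
}

fn main() {
    let local = [("a", 1), ("b", 2)];
    let fallback: &dyn Fn(&str) -> Option<i32> =
        &|name| if name == "c" { Some(3) } else { None };
    assert_eq!(lookup(&local, Some(fallback), "b"), Some(2));
    assert_eq!(lookup(&local, Some(fallback), "c"), Some(3));
    assert_eq!(lookup(&local, Some(fallback), "d"), None);
    println!("fallback resolution works");
}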
@@ -124,7 +124,7 @@ impl<'ctx> CodeGenContext<'ctx, '_> {
 
     /// Checks the field and attributes of classes
     /// Returns the index of attr in class fields otherwise returns the attribute value
-    pub fn get_attr_index(&mut self, ty: Type, attr: StrRef) -> (Option<usize>, Option<Constant>) {
+    pub fn get_attr_index(&mut self, ty: Type, attr: StrRef) -> (usize, Option<Constant>) {
         let obj_id = match &*self.unifier.get_ty(ty) {
             TypeEnum::TObj { obj_id, .. } => *obj_id,
             TypeEnum::TModule { module_id, .. } => *module_id,
@@ -134,16 +134,13 @@ impl<'ctx> CodeGenContext<'ctx, '_> {
         let def = &self.top_level.definitions.read()[obj_id.0];
         let (index, value) = if let TopLevelDef::Class { fields, attributes, .. } = &*def.read() {
             if let Some(field_index) = fields.iter().find_position(|x| x.0 == attr) {
-                (Some(field_index.0), None)
+                (field_index.0, None)
             } else {
-                let attribute_index = attributes.iter().find_position(|x| x.0 == attr);
-                (
-                    attribute_index.map(|(idx, _)| idx),
-                    attribute_index.map(|(_, (_, _, k))| k.clone()),
-                )
+                let attribute_index = attributes.iter().find_position(|x| x.0 == attr).unwrap();
+                (attribute_index.0, Some(attribute_index.1.2.clone()))
             }
         } else if let TopLevelDef::Module { attributes, .. } = &*def.read() {
-            (attributes.iter().find_position(|x| x.0 == attr).map(|(idx, _)| idx), None)
+            (attributes.iter().find_position(|x| x.0 == attr).unwrap().0, None)
         } else {
             codegen_unreachable!(self)
         };
@@ -2464,7 +2461,7 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
             let (index, _) = ctx.get_attr_index(value.custom.unwrap(), *attr);
             Ok(ValueEnum::Dynamic(ctx.build_gep_and_load(
                 v.into_pointer_value(),
-                &[zero, int32.const_int(index.unwrap() as u64, false)],
+                &[zero, int32.const_int(index as u64, false)],
                 None,
             ))) as Result<_, String>
         },
@@ -2481,7 +2478,7 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
             }
             ValueEnum::Dynamic(ctx.build_gep_and_load(
                 v.into_pointer_value(),
-                &[zero, int32.const_int(index.unwrap() as u64, false)],
+                &[zero, int32.const_int(index as u64, false)],
                 None,
             ))
         }
@@ -133,7 +133,7 @@ pub fn gen_store_target<'ctx, G: CodeGenerator>(
                 ptr,
                 &[
                     ctx.ctx.i32_type().const_zero(),
-                    ctx.ctx.i32_type().const_int(index.unwrap() as u64, false),
+                    ctx.ctx.i32_type().const_int(index as u64, false),
                 ],
                 name.unwrap_or(""),
             )
@@ -463,9 +463,9 @@ impl TopLevelComposer {
 
     /// Registers a top-level variable with the given `name` into the composer.
     ///
-    /// - `annotation` - The type annotation of the top-level variable, or [`None`] if no type
+    /// `annotation` - The type annotation of the top-level variable, or [`None`] if no type
     ///   annotation is provided.
-    /// - `location` - The location of the top-level variable.
+    /// `location` - The location of the top-level variable.
     pub fn register_top_level_var(
         &mut self,
         name: Ident,
@@ -1999,15 +1999,13 @@ impl TopLevelComposer {
                 ExprKind::Subscript { value, slice, .. }
                     if matches!(
                         &value.node,
-                        ast::ExprKind::Name { id, .. } if self.core_config.kernel_ann.is_some_and(|c| id == &c.into()) || id == &self.core_config.kernel_invariant_ann.into()
+                        ast::ExprKind::Name { id, .. } if self.core_config.kernel_ann.is_some_and(|c| id == &c.into())
                     ) =>
                 {
                     slice
                 }
                 _ if self.core_config.kernel_ann.is_none() => ty_decl,
-                _ => unreachable!(
-                    "Global variables should be annotated with Kernel[] or KernelInvariant[]"
-                ), // ignore fields annotated otherwise
+                _ => unreachable!("Global variables should be annotated with Kernel[]"), // ignore fields annotated otherwise
             };
 
             let ty_annotation = parse_ast_to_type_annotation_kinds(
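Note: this hunk narrows which global-variable annotations the composer accepts: the left side takes either `Kernel[...]` or `KernelInvariant[...]`, the right side only `Kernel[...]`. Both rely on nested `matches!` guards over the AST. A small, self-contained sketch of that pattern on a toy expression type (the real code matches the Python AST nodes):

// Toy AST: just enough to mirror `Subscript(Name("Kernel"), ...)`.
enum Expr {
    Name(String),
    Subscript { value: Box<Expr> },
}

fn is_kernel_annotation(ann: &Expr, accept_invariant: bool) -> bool {
    matches!(
        ann,
        Expr::Subscript { value, .. } if matches!(
            value.as_ref(),
            Expr::Name(id) if id == "Kernel" || (accept_invariant && id == "KernelInvariant")
        )
    )
}

fn main() {
    let ann = Expr::Subscript { value: Box::new(Expr::Name("KernelInvariant".into())) };
    // Accepted by the left-hand branch, rejected by the right-hand one.
    assert!(is_kernel_annotation(&ann, true));
    assert!(!is_kernel_annotation(&ann, false));
    println!("annotation matching behaves as expected");
}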
@ -60,182 +60,6 @@ impl TypeAnnotation {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Converts a [`DefinitionId`] representing a [`TopLevelDef::Class`] and its type arguments into a
|
|
||||||
/// [`TypeAnnotation`].
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
|
||||||
fn class_def_id_to_type_annotation<T, S: std::hash::BuildHasher + Clone>(
|
|
||||||
resolver: &(dyn SymbolResolver + Send + Sync),
|
|
||||||
top_level_defs: &[Arc<RwLock<TopLevelDef>>],
|
|
||||||
unifier: &mut Unifier,
|
|
||||||
primitives: &PrimitiveStore,
|
|
||||||
mut locked: HashMap<DefinitionId, Vec<Type>, S>,
|
|
||||||
id: StrRef,
|
|
||||||
(obj_id, type_args): (DefinitionId, Option<&Expr<T>>),
|
|
||||||
location: &Location,
|
|
||||||
) -> Result<TypeAnnotation, HashSet<String>> {
|
|
||||||
let Some(top_level_def) = top_level_defs.get(obj_id.0) else {
|
|
||||||
return Err(HashSet::from([format!(
|
|
||||||
"NameError: name '{id}' is not defined (at {location})",
|
|
||||||
)]));
|
|
||||||
};
|
|
||||||
|
|
||||||
// We need to use `try_read` here, since the composer may be processing our class right now,
|
|
||||||
// which requires exclusive access to modify the class internals.
|
|
||||||
//
|
|
||||||
// `locked` is guaranteed to hold a k-v pair of the composer-processing class, so fallback
|
|
||||||
// to it if the `top_level_def` is already locked for mutation.
|
|
||||||
let type_vars = if let Some(def_read) = top_level_def.try_read() {
|
|
||||||
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
|
|
||||||
type_vars.clone()
|
|
||||||
} else {
|
|
||||||
return Err(HashSet::from([format!(
|
|
||||||
"function cannot be used as a type (at {location})",
|
|
||||||
)]));
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
locked.get(&obj_id).unwrap().clone()
|
|
||||||
};
|
|
||||||
|
|
||||||
let param_type_infos = if let Some(slice) = type_args {
|
|
||||||
// we do not check whether the application of type variables are compatible here
|
|
||||||
let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
|
|
||||||
elts.iter().collect_vec()
|
|
||||||
} else {
|
|
||||||
vec![slice]
|
|
||||||
};
|
|
||||||
|
|
||||||
if type_vars.len() != params_ast.len() {
|
|
||||||
return Err(HashSet::from([format!(
|
|
||||||
"expect {} type parameters but got {} (at {})",
|
|
||||||
type_vars.len(),
|
|
||||||
params_ast.len(),
|
|
||||||
params_ast[0].location,
|
|
||||||
)]));
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = params_ast
|
|
||||||
.iter()
|
|
||||||
.map(|x| {
|
|
||||||
parse_ast_to_type_annotation_kinds(
|
|
||||||
resolver,
|
|
||||||
top_level_defs,
|
|
||||||
unifier,
|
|
||||||
primitives,
|
|
||||||
x,
|
|
||||||
{
|
|
||||||
locked.insert(obj_id, type_vars.clone());
|
|
||||||
locked.clone()
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect::<Result<Vec<_>, _>>()?;
|
|
||||||
|
|
||||||
// make sure the result do not contain any type vars
|
|
||||||
let no_type_var =
|
|
||||||
result.iter().all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
|
|
||||||
if no_type_var {
|
|
||||||
result
|
|
||||||
} else {
|
|
||||||
return Err(HashSet::from([format!(
|
|
||||||
"application of type vars to generic class is not currently supported (at {})",
|
|
||||||
params_ast[0].location
|
|
||||||
)]));
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// check param number here
|
|
||||||
if !type_vars.is_empty() {
|
|
||||||
return Err(HashSet::from([format!(
|
|
||||||
"expect {} type variable parameter but got 0 (at {location})",
|
|
||||||
type_vars.len(),
|
|
||||||
)]));
|
|
||||||
}
|
|
||||||
|
|
||||||
Vec::new()
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Parses the `id` of a [`ast::ExprKind::Name`] expression as a [`TypeAnnotation`].
|
|
||||||
fn parse_name_as_type_annotation<T, S: std::hash::BuildHasher + Clone>(
|
|
||||||
resolver: &(dyn SymbolResolver + Send + Sync),
|
|
||||||
top_level_defs: &[Arc<RwLock<TopLevelDef>>],
|
|
||||||
unifier: &mut Unifier,
|
|
||||||
primitives: &PrimitiveStore,
|
|
||||||
locked: HashMap<DefinitionId, Vec<Type>, S>,
|
|
||||||
id: StrRef,
|
|
||||||
location: &Location,
|
|
||||||
) -> Result<TypeAnnotation, HashSet<String>> {
|
|
||||||
if id == "int32".into() {
|
|
||||||
Ok(TypeAnnotation::Primitive(primitives.int32))
|
|
||||||
} else if id == "int64".into() {
|
|
||||||
Ok(TypeAnnotation::Primitive(primitives.int64))
|
|
||||||
} else if id == "uint32".into() {
|
|
||||||
Ok(TypeAnnotation::Primitive(primitives.uint32))
|
|
||||||
} else if id == "uint64".into() {
|
|
||||||
Ok(TypeAnnotation::Primitive(primitives.uint64))
|
|
||||||
} else if id == "float".into() {
|
|
||||||
Ok(TypeAnnotation::Primitive(primitives.float))
|
|
||||||
} else if id == "bool".into() {
|
|
||||||
Ok(TypeAnnotation::Primitive(primitives.bool))
|
|
||||||
} else if id == "str".into() {
|
|
||||||
Ok(TypeAnnotation::Primitive(primitives.str))
|
|
||||||
} else if id == "Exception".into() {
|
|
||||||
Ok(TypeAnnotation::CustomClass { id: PrimDef::Exception.id(), params: Vec::default() })
|
|
||||||
} else if let Ok(obj_id) = resolver.get_identifier_def(id) {
|
|
||||||
class_def_id_to_type_annotation(
|
|
||||||
resolver,
|
|
||||||
top_level_defs,
|
|
||||||
unifier,
|
|
||||||
primitives,
|
|
||||||
locked,
|
|
||||||
id,
|
|
||||||
(obj_id, None as Option<&Expr<T>>),
|
|
||||||
location,
|
|
||||||
)
|
|
||||||
} else if let Ok(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, id) {
|
|
||||||
if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
|
|
||||||
let var = unifier.get_fresh_var(Some(id), Some(*location)).ty;
|
|
||||||
unifier.unify(var, ty).unwrap();
|
|
||||||
Ok(TypeAnnotation::TypeVar(ty))
|
|
||||||
} else {
|
|
||||||
Err(HashSet::from([format!("`{id}` is not a valid type annotation (at {location})",)]))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Err(HashSet::from([format!("`{id}` is not a valid type annotation (at {location})",)]))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Parses the `id` and generic arguments of a class as a [`TypeAnnotation`].
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
|
||||||
fn parse_class_id_as_type_annotation<T, S: std::hash::BuildHasher + Clone>(
|
|
||||||
resolver: &(dyn SymbolResolver + Send + Sync),
|
|
||||||
top_level_defs: &[Arc<RwLock<TopLevelDef>>],
|
|
||||||
unifier: &mut Unifier,
|
|
||||||
primitives: &PrimitiveStore,
|
|
||||||
locked: HashMap<DefinitionId, Vec<Type>, S>,
|
|
||||||
id: StrRef,
|
|
||||||
slice: &Expr<T>,
|
|
||||||
location: &Location,
|
|
||||||
) -> Result<TypeAnnotation, HashSet<String>> {
|
|
||||||
if ["virtual".into(), "Generic".into(), "tuple".into(), "Option".into()].contains(&id) {
|
|
||||||
return Err(HashSet::from([format!("keywords cannot be class name (at {location})")]));
|
|
||||||
}
|
|
||||||
|
|
||||||
let obj_id = resolver.get_identifier_def(id)?;
|
|
||||||
|
|
||||||
class_def_id_to_type_annotation(
|
|
||||||
resolver,
|
|
||||||
top_level_defs,
|
|
||||||
unifier,
|
|
||||||
primitives,
|
|
||||||
locked,
|
|
||||||
id,
|
|
||||||
(obj_id, Some(slice)),
|
|
||||||
location,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Parses an AST expression `expr` into a [`TypeAnnotation`].
|
/// Parses an AST expression `expr` into a [`TypeAnnotation`].
|
||||||
///
|
///
|
||||||
/// * `locked` - A [`HashMap`] containing the IDs of known definitions, mapped to a [`Vec`] of all
|
/// * `locked` - A [`HashMap`] containing the IDs of known definitions, mapped to a [`Vec`] of all
|
||||||
@@ -251,17 +75,152 @@ pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(
     // the key stores the type_var of this topleveldef::class, we only need this field here
     locked: HashMap<DefinitionId, Vec<Type>, S>,
 ) -> Result<TypeAnnotation, HashSet<String>> {
-    match &expr.node {
-        ast::ExprKind::Name { id, .. } => parse_name_as_type_annotation::<T, S>(
-            resolver,
-            top_level_defs,
-            unifier,
-            primitives,
-            locked,
-            *id,
-            &expr.location,
-        ),
+    let name_handle = |id: &StrRef,
+                       unifier: &mut Unifier,
+                       locked: HashMap<DefinitionId, Vec<Type>, S>| {
+        if id == &"int32".into() {
+            Ok(TypeAnnotation::Primitive(primitives.int32))
+        } else if id == &"int64".into() {
+            Ok(TypeAnnotation::Primitive(primitives.int64))
+        } else if id == &"uint32".into() {
+            Ok(TypeAnnotation::Primitive(primitives.uint32))
+        } else if id == &"uint64".into() {
+            Ok(TypeAnnotation::Primitive(primitives.uint64))
+        } else if id == &"float".into() {
+            Ok(TypeAnnotation::Primitive(primitives.float))
+        } else if id == &"bool".into() {
+            Ok(TypeAnnotation::Primitive(primitives.bool))
+        } else if id == &"str".into() {
+            Ok(TypeAnnotation::Primitive(primitives.str))
+        } else if id == &"Exception".into() {
+            Ok(TypeAnnotation::CustomClass { id: PrimDef::Exception.id(), params: Vec::default() })
+        } else if let Ok(obj_id) = resolver.get_identifier_def(*id) {
+            let type_vars = {
+                let Some(top_level_def) = top_level_defs.get(obj_id.0) else {
+                    return Err(HashSet::from([format!(
+                        "NameError: name '{id}' is not defined (at {})",
+                        expr.location
+                    )]));
+                };
+                let def_read = top_level_def.try_read();
+                if let Some(def_read) = def_read {
+                    if let TopLevelDef::Class { type_vars, .. } = &*def_read {
+                        type_vars.clone()
+                    } else {
+                        return Err(HashSet::from([format!(
+                            "function cannot be used as a type (at {})",
+                            expr.location
+                        )]));
+                    }
+                } else {
+                    locked.get(&obj_id).unwrap().clone()
+                }
+            };
+            // check param number here
+            if !type_vars.is_empty() {
+                return Err(HashSet::from([format!(
+                    "expect {} type variable parameter but got 0 (at {})",
+                    type_vars.len(),
+                    expr.location,
+                )]));
+            }
+            Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] })
+        } else if let Ok(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id) {
+            if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
+                let var = unifier.get_fresh_var(Some(*id), Some(expr.location)).ty;
+                unifier.unify(var, ty).unwrap();
+                Ok(TypeAnnotation::TypeVar(ty))
+            } else {
+                Err(HashSet::from([format!(
+                    "`{}` is not a valid type annotation (at {})",
+                    id, expr.location
+                )]))
+            }
+        } else {
+            Err(HashSet::from([format!(
+                "`{}` is not a valid type annotation (at {})",
+                id, expr.location
+            )]))
+        }
+    };
+
+    let class_name_handle =
+        |id: &StrRef,
+         slice: &ast::Expr<T>,
+         unifier: &mut Unifier,
+         mut locked: HashMap<DefinitionId, Vec<Type>, S>| {
+            if ["virtual".into(), "Generic".into(), "tuple".into(), "Option".into()].contains(id) {
+                return Err(HashSet::from([format!(
+                    "keywords cannot be class name (at {})",
+                    expr.location
+                )]));
+            }
+            let obj_id = resolver.get_identifier_def(*id)?;
+            let type_vars = {
+                let Some(top_level_def) = top_level_defs.get(obj_id.0) else {
+                    return Err(HashSet::from([format!(
+                        "NameError: name '{id}' is not defined (at {})",
+                        expr.location
+                    )]));
+                };
+                let def_read = top_level_def.try_read();
+                if let Some(def_read) = def_read {
+                    let TopLevelDef::Class { type_vars, .. } = &*def_read else {
+                        unreachable!("must be class here")
+                    };
+                    type_vars.clone()
+                } else {
+                    locked.get(&obj_id).unwrap().clone()
+                }
+            };
+            // we do not check whether the application of type variables are compatible here
+            let param_type_infos = {
+                let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
+                    elts.iter().collect_vec()
+                } else {
+                    vec![slice]
+                };
+                if type_vars.len() != params_ast.len() {
+                    return Err(HashSet::from([format!(
+                        "expect {} type parameters but got {} (at {})",
+                        type_vars.len(),
+                        params_ast.len(),
+                        params_ast[0].location,
+                    )]));
+                }
+                let result = params_ast
+                    .iter()
+                    .map(|x| {
+                        parse_ast_to_type_annotation_kinds(
+                            resolver,
+                            top_level_defs,
+                            unifier,
+                            primitives,
+                            x,
+                            {
+                                locked.insert(obj_id, type_vars.clone());
+                                locked.clone()
+                            },
+                        )
+                    })
+                    .collect::<Result<Vec<_>, _>>()?;
+                // make sure the result do not contain any type vars
+                let no_type_var =
+                    result.iter().all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
+                if no_type_var {
+                    result
+                } else {
+                    return Err(HashSet::from([format!(
+                        "application of type vars to generic class is not currently supported (at {})",
+                        params_ast[0].location
+                    )]));
+                }
+            };
+            Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
+        };
+
+    match &expr.node {
+        ast::ExprKind::Name { id, .. } => name_handle(id, unifier, locked),

         // virtual
         ast::ExprKind::Subscript { value, slice, .. }
             if {
@@ -382,105 +341,9 @@ pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(

         // custom class
         ast::ExprKind::Subscript { value, slice, .. } => {
-            match &value.node {
-                ast::ExprKind::Name { id, .. } => parse_class_id_as_type_annotation(
-                    resolver,
-                    top_level_defs,
-                    unifier,
-                    primitives,
-                    locked,
-                    *id,
-                    slice,
-                    &expr.location,
-                ),
-
-                ast::ExprKind::Attribute { value, attr, .. } => {
-                    if let ast::ExprKind::Name { id, .. } = &value.node {
-                        let mod_id = resolver.get_identifier_def(*id)?;
-                        let Some(mod_tld) = top_level_defs.get(mod_id.0) else {
-                            return Err(HashSet::from([format!(
-                                "NameError: name '{id}' is not defined (at {})",
-                                expr.location
-                            )]));
-                        };
-
-                        let matching_attr =
-                            if let TopLevelDef::Module { methods, .. } = &*mod_tld.read() {
-                                methods.get(attr).copied()
-                            } else {
-                                unreachable!("must be module here")
-                            };
-
-                        let Some(def_id) = matching_attr else {
-                            return Err(HashSet::from([format!(
-                                "AttributeError: module '{id}' has no attribute '{attr}' (at {})",
-                                expr.location
-                            )]));
-                        };
-
-                        class_def_id_to_type_annotation::<T, S>(
-                            resolver,
-                            top_level_defs,
-                            unifier,
-                            primitives,
-                            locked,
-                            *attr,
-                            (def_id, Some(slice)),
-                            &expr.location,
-                        )
-                    } else {
-                        // TODO: Handle multiple indirection
-                        Err(HashSet::from([format!(
-                            "unsupported expression type for class name (at {})",
-                            value.location
-                        )]))
-                    }
-                }
-
-                _ => Err(HashSet::from([format!(
-                    "unsupported expression type for class name (at {})",
-                    value.location
-                )])),
-            }
-        }
-
-        ast::ExprKind::Constant { value, .. } => Ok(TypeAnnotation::Literal(vec![value.clone()])),
-
-        ast::ExprKind::Attribute { value, attr, .. } => {
             if let ast::ExprKind::Name { id, .. } = &value.node {
-                let mod_id = resolver.get_identifier_def(*id)?;
-                let Some(mod_tld) = top_level_defs.get(mod_id.0) else {
-                    return Err(HashSet::from([format!(
-                        "NameError: name '{id}' is not defined (at {})",
-                        expr.location
-                    )]));
-                };
-
-                let matching_attr = if let TopLevelDef::Module { methods, .. } = &*mod_tld.read() {
-                    methods.get(attr).copied()
-                } else {
-                    unreachable!("must be module here")
-                };
-
-                let Some(def_id) = matching_attr else {
-                    return Err(HashSet::from([format!(
-                        "AttributeError: module '{id}' has no attribute '{attr}' (at {})",
-                        expr.location
-                    )]));
-                };
-
-                class_def_id_to_type_annotation::<T, S>(
-                    resolver,
-                    top_level_defs,
-                    unifier,
-                    primitives,
-                    locked,
-                    *attr,
-                    (def_id, None),
-                    &expr.location,
-                )
+                class_name_handle(id, slice, unifier, locked)
             } else {
-                // TODO: Handle multiple indirection
                 Err(HashSet::from([format!(
                     "unsupported expression type for class name (at {})",
                     value.location
@@ -488,6 +351,8 @@ pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(
             }
         }

+        ast::ExprKind::Constant { value, .. } => Ok(TypeAnnotation::Literal(vec![value.clone()])),
+
         _ => Err(HashSet::from([format!(
             "unsupported expression for type annotation (at {})",
             expr.location
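Both sides of the hunks above resolve the same identifiers to the same TypeAnnotation variants; they differ only in where the logic lives (free helpers such as parse_name_as_type_annotation and class_def_id_to_type_annotation on the removed side, the name_handle / class_name_handle closures on the added side). The following is a condensed, self-contained sketch of that name dispatch with the resolver, unifier and primitive store stripped out; the Annotation enum, the lookup tables and parse_name are simplified stand-ins, not nac3core's real types or API.

use std::collections::{HashMap, HashSet};

// Simplified stand-ins for nac3core's TypeAnnotation and DefinitionId; the real
// variants carry unifier `Type` handles and definition IDs rather than strings.
#[derive(Debug, PartialEq)]
enum Annotation {
    Primitive(String),
    CustomClass { id: usize, params: Vec<Annotation> },
    TypeVar(String),
}

// Mirrors the dispatch order shown in the diff: primitive names first, then known
// class definitions (rejected if they expect type parameters but none are given),
// then type variables; anything else is not a valid annotation.
fn parse_name(
    id: &str,
    classes: &HashMap<String, (usize, usize)>, // name -> (definition id, #type vars)
    type_vars: &HashSet<String>,
) -> Result<Annotation, String> {
    const PRIMITIVES: &[&str] = &["int32", "int64", "uint32", "uint64", "float", "bool", "str"];
    if PRIMITIVES.contains(&id) {
        Ok(Annotation::Primitive(id.to_string()))
    } else if let Some(&(def_id, n_type_vars)) = classes.get(id) {
        if n_type_vars != 0 {
            return Err(format!("expect {n_type_vars} type variable parameter but got 0"));
        }
        Ok(Annotation::CustomClass { id: def_id, params: vec![] })
    } else if type_vars.contains(id) {
        Ok(Annotation::TypeVar(id.to_string()))
    } else {
        Err(format!("`{id}` is not a valid type annotation"))
    }
}

fn main() {
    let classes = HashMap::from([("Demo".to_string(), (42usize, 0usize))]);
    let type_vars = HashSet::from(["T".to_string()]);
    assert_eq!(
        parse_name("int32", &classes, &type_vars),
        Ok(Annotation::Primitive("int32".into()))
    );
    assert_eq!(
        parse_name("Demo", &classes, &type_vars),
        Ok(Annotation::CustomClass { id: 42, params: vec![] })
    );
    assert_eq!(parse_name("T", &classes, &type_vars), Ok(Annotation::TypeVar("T".into())));
    assert!(parse_name("unknown", &classes, &type_vars).is_err());
}

The ordering matches what the diff shows on both sides: primitive names win, a bare class name is rejected when the class expects type parameters, and only then is a type variable considered.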
@@ -19,12 +19,8 @@ use nac3core::{
         WithCall, WorkerRegistry, concrete_type::ConcreteTypeStore, irrt::load_irrt,
     },
     inkwell::{
-        OptimizationLevel,
-        memory_buffer::MemoryBuffer,
-        module::{Linkage, Module},
-        passes::PassBuilderOptions,
-        support::is_multithreaded,
-        targets::*,
+        OptimizationLevel, memory_buffer::MemoryBuffer, module::Linkage,
+        passes::PassBuilderOptions, support::is_multithreaded, targets::*,
     },
     nac3parser::{
         ast::{Constant, Expr, ExprKind, StmtKind, StrRef},
@@ -63,13 +59,11 @@ struct CommandLineArgs {
     #[arg(short = 'O', default_value_t = 2, value_parser = clap::value_parser!(u32).range(0..=3))]
     opt_level: u32,

-    /// Whether to emit LLVM bitcode at the end of every module.
+    /// Whether to emit LLVM IR at the end of every module.
+    ///
+    /// If multithreaded compilation is also enabled, each thread will emit its own module.
     #[arg(long, default_value_t = false)]
-    emit_llvm_bc: bool,
+    emit_llvm: bool,

-    /// Whether to emit LLVM IR text at the end of every module.
-    #[arg(long, default_value_t = false)]
-    emit_llvm_ir: bool,
-
     /// The target triple to compile for.
     #[arg(long)]
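For reference, the two-flag form on the removed side maps onto clap's derive API roughly as below; a minimal sketch assuming clap 4 with the derive feature, which the #[arg(...)] attributes in this struct already imply. With derive defaults the snake_case fields surface as kebab-case switches, i.e. --emit-llvm-bc and --emit-llvm-ir, whereas the added side keeps a single --emit-llvm toggle.

use clap::Parser;

/// Minimal stand-in for the relevant part of `CommandLineArgs`.
#[derive(Parser, Debug)]
struct EmitArgs {
    /// Whether to emit LLVM bitcode at the end of every module.
    #[arg(long, default_value_t = false)]
    emit_llvm_bc: bool,

    /// Whether to emit LLVM IR text at the end of every module.
    #[arg(long, default_value_t = false)]
    emit_llvm_ir: bool,
}

fn main() {
    // e.g. `cargo run -- --emit-llvm-bc --emit-llvm-ir`
    let args = EmitArgs::parse();
    println!("{args:?}");
}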
@@ -282,16 +276,8 @@ fn handle_global_var(

 fn main() {
     let cli = CommandLineArgs::parse();
-    let CommandLineArgs {
-        file_name,
-        threads,
-        opt_level,
-        emit_llvm_bc,
-        emit_llvm_ir,
-        triple,
-        mcpu,
-        target_features,
-    } = cli;
+    let CommandLineArgs { file_name, threads, opt_level, emit_llvm, triple, mcpu, target_features } =
+        cli;

     Target::initialize_all(&InitializationConfig::default());

@@ -360,18 +346,11 @@ fn main() {
     let resolver =
         Arc::new(Resolver(internal_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;

-    let emit_llvm = |module: &Module<'_>, filename: &str| {
-        if emit_llvm_bc {
-            module.write_bitcode_to_path(Path::new(format!("{filename}.bc").as_str()));
-        }
-        if emit_llvm_ir {
-            module.print_to_file(Path::new(format!("{filename}.ll").as_str())).unwrap();
-        }
-    };
-
     // Process IRRT
     let irrt = load_irrt(&context, resolver.as_ref());
-    emit_llvm(&irrt, "irrt");
+    if emit_llvm {
+        irrt.write_bitcode_to_path(Path::new("irrt.bc"));
+    }

     // Process the Python script
     let parser_result = parser::parse_program(&program, file_name.into()).unwrap();
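The emit_llvm closure removed on the left exists so every compilation stage can be dumped with one call instead of repeating an if emit_llvm { ... } guard at each site, and so bitcode and textual IR can be toggled independently. Below is a standalone sketch of the same idea, assuming the inkwell crate this file already uses; dump_module and the stage names are illustrative, not part of the diff.

use std::path::Path;

use inkwell::context::Context;
use inkwell::module::Module;

// Dump `module` as bitcode and/or textual IR under a per-stage name, mirroring
// the removed `emit_llvm` closure as a free function.
fn dump_module(module: &Module<'_>, stage: &str, emit_bc: bool, emit_ir: bool) {
    if emit_bc {
        module.write_bitcode_to_path(Path::new(&format!("{stage}.bc")));
    }
    if emit_ir {
        module.print_to_file(Path::new(&format!("{stage}.ll"))).unwrap();
    }
}

fn main() {
    let context = Context::create();
    let module = context.create_module("demo");
    // Dump the (empty) module before and after a hypothetical pass pipeline.
    dump_module(&module, "demo.pre-opt", true, true);
    dump_module(&module, "demo.post-opt", true, true);
}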
@@ -496,19 +475,23 @@ fn main() {
     let main = context
         .create_module_from_ir(MemoryBuffer::create_from_memory_range(&buffers[0], "main"))
         .unwrap();
-    emit_llvm(&main, "main");
+    if emit_llvm {
+        main.write_bitcode_to_path(Path::new("main.bc"));
+    }

-    for buffer in buffers.iter().skip(1) {
+    for (idx, buffer) in buffers.iter().skip(1).enumerate() {
         let other = context
             .create_module_from_ir(MemoryBuffer::create_from_memory_range(buffer, "main"))
             .unwrap();

+        if emit_llvm {
+            other.write_bitcode_to_path(Path::new(&format!("module{idx}.bc")));
+        }
+
         main.link_in_module(other).unwrap();
     }
-    emit_llvm(&main, "main.merged");

     main.link_in_module(irrt).unwrap();
-    emit_llvm(&main, "main.fat");

     // Private all functions except "run"
     let mut function_iter = main.get_first_function();
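The loop above re-parses each worker thread's serialized buffer into a module and folds it into main before IRRT is linked in. A minimal sketch of that merge step with inkwell, using freshly created modules instead of serialized buffers so it runs on its own; the module and function names here are illustrative.

use inkwell::context::Context;

fn main() {
    let context = Context::create();

    // Stand-ins for the per-thread modules produced by the worker registry.
    let main_module = context.create_module("main");
    let worker = context.create_module("module1");

    // Give the worker module something to contribute: a declaration of `helper`.
    let i32_type = context.i32_type();
    worker.add_function("helper", i32_type.fn_type(&[], false), None);

    // `link_in_module` consumes the source module and merges its contents into the
    // target, which is why every worker module (and finally IRRT) is linked into `main`.
    main_module.link_in_module(worker).unwrap();
    assert!(main_module.get_function("helper").is_some());
}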
@@ -519,8 +502,6 @@ fn main() {
         function_iter = func.get_next_function();
     }

-    emit_llvm(&main, "main.pre-opt");
-
     // Optimize `main`
     let pass_options = PassBuilderOptions::create();
     pass_options.set_merge_functions(true);
@@ -530,8 +511,6 @@ fn main() {
         panic!("Failed to run optimization for module `main`: {}", err.to_string());
     }

-    emit_llvm(&main, "main.post-opt");
-
     // Write output
     target_machine
         .write_to_file(&main, FileType::Object, Path::new("module.o"))
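The remainder of the diff is cut off here, but the surrounding context shows the merged module being optimized through PassBuilderOptions and then written out as module.o. A minimal sketch of that optimize-then-emit sequence with inkwell; the "default<O2>" pass string and the host-target setup are illustrative defaults, not the values nac3standalone derives from its --triple, --mcpu and --target-features flags.

use std::path::Path;

use inkwell::OptimizationLevel;
use inkwell::context::Context;
use inkwell::passes::PassBuilderOptions;
use inkwell::targets::{CodeModel, FileType, InitializationConfig, RelocMode, Target, TargetMachine};

fn main() {
    Target::initialize_all(&InitializationConfig::default());

    let context = Context::create();
    let module = context.create_module("main");

    // Build a target machine for the host; the real tool honours its CLI flags instead.
    let triple = TargetMachine::get_default_triple();
    let target = Target::from_triple(&triple).unwrap();
    let machine = target
        .create_target_machine(
            &triple,
            "generic",
            "",
            OptimizationLevel::Default,
            RelocMode::Default,
            CodeModel::Default,
        )
        .unwrap();

    // Run a default O2 pipeline, then write the object file, mirroring the
    // optimization step and the `write_to_file(..., FileType::Object, "module.o")` call.
    let pass_options = PassBuilderOptions::create();
    pass_options.set_merge_functions(true);
    module
        .run_passes("default<O2>", &machine, pass_options)
        .unwrap_or_else(|err| panic!("Failed to run optimization for module `main`: {err}"));
    machine.write_to_file(&module, FileType::Object, Path::new("module.o")).unwrap();
}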