forked from M-Labs/nac3

Compare commits: 6a0377642f...b0526ba29f

16 Commits
b0526ba29f
0a481ec880
65fa85815a
e4f6fbeeeb
f5285fbf7f
b82926297b
8142e01aeb
e8c967cc29
902fd97a0a
d73a47b096
6a29ffcfbc
ae7b1e4391
cdd0b2011e
34f6c3eaf8
088f026356
d23da9966d
Cargo.lock (generated): 102 changed lines
@@ -127,9 +127,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"

[[package]]
name = "cc"
version = "1.2.16"
version = "1.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c"
checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a"
dependencies = [
"shlex",
]
@@ -168,7 +168,7 @@ version = "4.5.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7"
dependencies = [
"heck 0.5.0",
"heck",
"proc-macro2",
"quote",
"syn 2.0.100",
@@ -340,9 +340,9 @@ checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"

[[package]]
name = "foldhash"
version = "0.1.4"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"

[[package]]
name = "function_name"
@@ -400,14 +400,14 @@ dependencies = [

[[package]]
name = "getrandom"
version = "0.3.1"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8"
checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0"
dependencies = [
"cfg-if",
"libc",
"wasi 0.13.3+wasi-0.2.2",
"windows-targets",
"r-efi",
"wasi 0.14.2+wasi-0.2.4",
]

[[package]]
@@ -425,12 +425,6 @@ dependencies = [
"foldhash",
]

[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"

[[package]]
name = "heck"
version = "0.5.0"
@@ -590,9 +584,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"

[[package]]
name = "linux-raw-sys"
version = "0.9.2"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db9c683daf087dc577b7506e9695b3d556a9f3849903fa28186283afd6809e9"
checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413"

[[package]]
name = "llvm-sys"
@@ -619,9 +613,9 @@ dependencies = [

[[package]]
name = "log"
version = "0.4.26"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"

[[package]]
name = "memchr"
@@ -859,7 +853,7 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
dependencies = [
"zerocopy 0.8.23",
"zerocopy 0.8.24",
]

[[package]]
@@ -903,15 +897,15 @@ dependencies = [

[[package]]
name = "pyo3"
version = "0.21.2"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5e00b96a521718e08e03b1a622f01c8a8deb50719335de3f60b3b3950f069d8"
checksum = "7f1c6c3591120564d64db2261bec5f910ae454f01def849b9c22835a84695e86"
dependencies = [
"cfg-if",
"indoc",
"libc",
"memoffset",
"parking_lot",
"once_cell",
"portable-atomic",
"pyo3-build-config",
"pyo3-ffi",
@@ -921,9 +915,9 @@ dependencies = [

[[package]]
name = "pyo3-build-config"
version = "0.21.2"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7883df5835fafdad87c0d888b266c8ec0f4c9ca48a5bed6bbb592e8dedee1b50"
checksum = "e9b6c2b34cf71427ea37c7001aefbaeb85886a074795e35f161f5aecc7620a7a"
dependencies = [
"once_cell",
"target-lexicon",
@@ -931,9 +925,9 @@ dependencies = [

[[package]]
name = "pyo3-ffi"
version = "0.21.2"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01be5843dc60b916ab4dad1dca6d20b9b4e6ddc8e15f50c47fe6d85f1fb97403"
checksum = "5507651906a46432cdda02cd02dd0319f6064f1374c9147c45b978621d2c3a9c"
dependencies = [
"libc",
"pyo3-build-config",
@@ -941,9 +935,9 @@ dependencies = [

[[package]]
name = "pyo3-macros"
version = "0.21.2"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77b34069fc0682e11b31dbd10321cbf94808394c56fd996796ce45217dfac53c"
checksum = "b0d394b5b4fd8d97d48336bb0dd2aebabad39f1d294edd6bcd2cccf2eefe6f42"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -953,11 +947,11 @@ dependencies = [

[[package]]
name = "pyo3-macros-backend"
version = "0.21.2"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08260721f32db5e1a5beae69a55553f56b99bd0e1c3e6e0a5e8851a9d0f5a85c"
checksum = "fd72da09cfa943b1080f621f024d2ef7e2773df7badd51aa30a2be1f8caa7c8e"
dependencies = [
"heck 0.4.1",
"heck",
"proc-macro2",
"pyo3-build-config",
"quote",
@@ -973,6 +967,12 @@ dependencies = [
"proc-macro2",
]

[[package]]
name = "r-efi"
version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"

[[package]]
name = "rand"
version = "0.8.5"
@@ -1050,9 +1050,9 @@ dependencies = [

[[package]]
name = "rustix"
version = "1.0.2"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7178faa4b75a30e269c71e61c353ce2748cf3d76f0c44c393f4e60abf49b825"
checksum = "e56a18552996ac8d29ecc3b190b4fdbb2d91ca4ec396de7bbffaf43f3d637e96"
dependencies = [
"bitflags",
"errno",
@@ -1171,9 +1171,9 @@ checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd"

[[package]]
name = "string-interner"
version = "0.18.0"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a3275464d7a9f2d4cac57c89c2ef96a8524dba2864c8d6f82e3980baf136f9b"
checksum = "23de088478b31c349c9ba67816fa55d9355232d63c3afea8bf513e31f0f1d2c0"
dependencies = [
"hashbrown",
"serde",
@@ -1209,7 +1209,7 @@ version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8"
dependencies = [
"heck 0.5.0",
"heck",
"proc-macro2",
"quote",
"rustversion",
@@ -1239,9 +1239,9 @@ dependencies = [

[[package]]
name = "target-lexicon"
version = "0.12.16"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a"

[[package]]
name = "target-triple"
@@ -1251,12 +1251,12 @@ checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790"

[[package]]
name = "tempfile"
version = "3.19.0"
version = "3.19.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "488960f40a3fd53d72c2a29a58722561dee8afdd175bd88e3db4677d7b2ba600"
checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf"
dependencies = [
"fastrand",
"getrandom 0.3.1",
"getrandom 0.3.2",
"once_cell",
"rustix",
"windows-sys 0.59.0",
@@ -1518,9 +1518,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"

[[package]]
name = "wasi"
version = "0.13.3+wasi-0.2.2"
version = "0.14.2+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2"
checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3"
dependencies = [
"wit-bindgen-rt",
]
@@ -1627,9 +1627,9 @@ dependencies = [

[[package]]
name = "wit-bindgen-rt"
version = "0.33.0"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c"
checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
dependencies = [
"bitflags",
]
@@ -1645,11 +1645,11 @@ dependencies = [

[[package]]
name = "zerocopy"
version = "0.8.23"
version = "0.8.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd97444d05a4328b90e75e503a34bad781f14e28a823ad3557f0750df1ebcbc6"
checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879"
dependencies = [
"zerocopy-derive 0.8.23",
"zerocopy-derive 0.8.24",
]

[[package]]
@@ -1665,9 +1665,9 @@ dependencies = [

[[package]]
name = "zerocopy-derive"
version = "0.8.23"
version = "0.8.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6352c01d0edd5db859a63e2605f4ea3183ddbd15e2c4a9e7d32184df75e4f154"
checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be"
dependencies = [
"proc-macro2",
"quote",
@@ -2,18 +2,18 @@
name = "nac3artiq"
version = "0.1.0"
authors = ["M-Labs"]
edition = "2021"
edition = "2024"

[lib]
name = "nac3artiq"
crate-type = ["cdylib"]

[dependencies]
indexmap = "2.7"
indexmap = "2.8"
itertools = "0.14"
pyo3 = { version = "0.21", features = ["extension-module", "gil-refs"] }
pyo3 = { version = "0.24", features = ["extension-module"] }
parking_lot = "0.12"
tempfile = "3.16"
tempfile = "3.19"
nac3core = { path = "../nac3core" }
nac3ld = { path = "../nac3ld" }
@@ -1,54 +1,54 @@
use std::{
collections::{hash_map::DefaultHasher, HashMap},
collections::{HashMap, hash_map::DefaultHasher},
hash::{Hash, Hasher},
iter::once,
mem,
sync::Arc,
io,
io::*,
};

use itertools::Itertools;
use pyo3::{
types::{PyDict, PyList},
PyObject, PyResult, Python,
prelude::*,
types::{PyDict, PyList},
};

use super::{symbol_resolver::InnerResolver, timeline::TimeFns, SpecialPythonId};
use nac3core::{
codegen::{
CodeGenContext, CodeGenerator,
expr::{create_fn_and_call, destructure_range, gen_call, infer_and_call_function},
llvm_intrinsics::{call_int_smax, call_memcpy, call_stackrestore, call_stacksave},
stmt::{gen_block, gen_for_callback_incrementing, gen_if_callback, gen_with},
type_aligned_alloca,
types::{ndarray::NDArrayType, RangeType},
types::{RangeType, ndarray::NDArrayType},
values::{
ArrayLikeIndexer, ArrayLikeValue, ArraySliceValue, ListValue, ProxyValue,
UntypedArrayLikeAccessor,
},
CodeGenContext, CodeGenerator,
},
inkwell::{
AddressSpace, IntPredicate, OptimizationLevel,
context::Context,
module::Linkage,
targets::TargetMachine,
types::{BasicType, IntType},
values::{BasicValueEnum, IntValue, PointerValue, StructValue},
AddressSpace, IntPredicate, OptimizationLevel,
},
nac3parser::ast::{Expr, ExprKind, Located, Stmt, StmtKind, StrRef},
symbol_resolver::ValueEnum,
toplevel::{
helper::{extract_ndims, PrimDef},
numpy::unpack_ndarray_var_tys,
DefinitionId, GenCall,
helper::{PrimDef, extract_ndims},
numpy::unpack_ndarray_var_tys,
},
typecheck::{
type_inferencer::PrimitiveStore,
typedef::{iter_type_vars, FunSignature, FuncArg, Type, TypeEnum, VarMap},
typedef::{FunSignature, FuncArg, Type, TypeEnum, VarMap, iter_type_vars},
},
};

use super::{SpecialPythonId, symbol_resolver::InnerResolver, timeline::TimeFns};

/// The parallelism mode within a block.
#[derive(Copy, Clone, Eq, PartialEq)]
enum ParallelMode {
@@ -191,11 +191,7 @@ impl CodeGenerator for ArtiqCodeGenerator<'_> {
}

fn get_size_type<'ctx>(&self, ctx: &'ctx Context) -> IntType<'ctx> {
if self.size_t == 32 {
ctx.i32_type()
} else {
ctx.i64_type()
}
if self.size_t == 32 { ctx.i32_type() } else { ctx.i64_type() }
}

fn gen_block<'ctx, 'a, 'c, I: Iterator<Item = &'c Stmt<Option<Type>>>>(
@@ -454,7 +450,10 @@ fn gen_rpc_tag(
&*ctx.unifier.get_ty_immutable(ndarray_ndims)
{
if values.len() != 1 {
return Err(format!("NDArray types with multiple literal bounds for ndims is not supported: {}", ctx.unifier.stringify(ty)));
return Err(format!(
"NDArray types with multiple literal bounds for ndims is not supported: {}",
ctx.unifier.stringify(ty)
));
}

let value = values[0].clone();
@@ -518,16 +517,8 @@ fn format_rpc_arg<'ctx>(
ctx.builder.build_int_truncate_or_bit_cast(sizeof_pdata, llvm_usize, "").unwrap();

let sizeof_buf_shape = ctx.builder.build_int_mul(sizeof_usize, ndims, "").unwrap();
let sizeof_buf = ctx.builder.build_int_add(sizeof_buf_shape, sizeof_pdata, "").unwrap();

let alignment = llvm_usize.const_int(8, false);
let sizeof_buf = ctx
.builder
.build_int_add(
sizeof_buf_shape,
ctx.builder.build_int_add(sizeof_pdata, alignment, "").unwrap(),
"",
)
.unwrap();
// buf = { data: void*, shape: [size_t; ndims]; }
let buf = ctx.builder.build_array_alloca(llvm_i8, sizeof_buf, "rpc.arg").unwrap();
let buf = ArraySliceValue::from_ptr_val(buf, sizeof_buf, Some("rpc.arg"));
@@ -538,46 +529,12 @@ fn format_rpc_arg<'ctx>(
// Write to `buf->data`
let carray_data = carray.load_data(ctx);
let carray_data = ctx.builder.build_pointer_cast(carray_data, llvm_pi8, "").unwrap();
if ctx.registry.llvm_options.opt_level == OptimizationLevel::None {
let dst_size = sizeof_pdata;
let src_size = sizeof_pdata;
let cmp = ctx
.builder
.build_int_compare(IntPredicate::ULE, src_size, dst_size, "buffer_size_check1")
.unwrap();
ctx.make_assert(
generator,
cmp,
"0:AssertionError",
"Buffer overflow risk in RPC data copy: source size {0} exceeds destination size {1}",
[Some(src_size), Some(dst_size), None],
ctx.current_loc,
);
}
call_memcpy(ctx, buf_data, carray_data, sizeof_pdata);

// Write to `buf->shape`
let carray_shape = ndarray.shape().base_ptr(ctx, generator);
let carray_shape_i8 =
ctx.builder.build_pointer_cast(carray_shape, llvm_pi8, "").unwrap();
// Safety check for buffer overflow
if ctx.registry.llvm_options.opt_level == OptimizationLevel::None {
let dst_size = sizeof_buf_shape;
let src_size = sizeof_buf_shape;
let cmp = ctx
.builder
.build_int_compare(IntPredicate::ULE, src_size, dst_size, "buffer_size_check2")
.unwrap();

ctx.make_assert(
generator,
cmp,
"0:AssertionError",
"Buffer overflow risk in RPC shape copy: source size {0} exceeds destination size {1}",
[Some(src_size), Some(dst_size), None],
ctx.current_loc,
);
}
call_memcpy(ctx, buf_shape, carray_shape_i8, sizeof_buf_shape);

buf.base_ptr(ctx, generator)
@@ -606,7 +563,6 @@ fn format_rpc_ret<'ctx>(
generator: &mut dyn CodeGenerator,
ctx: &mut CodeGenContext<'ctx, '_>,
ret_ty: Type,
is_async: bool,
) -> Option<BasicValueEnum<'ctx>> {
// -- receive value:
// T result = {
@@ -634,7 +590,6 @@ fn format_rpc_ret<'ctx>(
return None;
}

let stackptr = call_stacksave(ctx, Some("rpc.stack.ret"));
let prehead_bb = ctx.builder.get_insert_block().unwrap();
let current_function = prehead_bb.get_parent().unwrap();
let head_bb = ctx.ctx.append_basic_block(current_function, "rpc.head");
@@ -705,9 +660,14 @@ fn format_rpc_ret<'ctx>(
let unaligned_buffer_size =
ctx.builder.build_int_add(sizeof_ptr, sizeof_shape, "").unwrap();

let stackptr = call_stacksave(ctx, Some("rpc.stack.ret"));
let buffer =
type_aligned_alloca(generator, ctx, llvm_i8_8, sizeof_ptr, Some("rpc.buffer"));
let stackptr = call_stacksave(ctx, None);
let buffer = type_aligned_alloca(
generator,
ctx,
llvm_i8_8,
unaligned_buffer_size,
Some("rpc.buffer"),
);
let buffer = ArraySliceValue::from_ptr_val(buffer, unaligned_buffer_size, None);

// The first call to `rpc_recv` reads the top-level ndarray object: [pdata, shape]
@@ -858,9 +818,6 @@ fn format_rpc_ret<'ctx>(
}
};

if !is_async && !result.get_type().is_pointer_type() {
call_stackrestore(ctx, stackptr);
}
Some(result)
}

@@ -878,52 +835,33 @@ fn rpc_codegen_callback_fn<'ctx>(
let ptr_type = int8.ptr_type(AddressSpace::default());
let tag_ptr_type = ctx.ctx.struct_type(&[ptr_type.into(), size_type.into()], false);

let service_id = int32.const_int(fun.1 .0 as u64, false);
// build the RPC tag with keyword
let service_id = int32.const_int(fun.1.0 as u64, false);
// -- setup rpc tags
let mut tag = Vec::new();

if obj.is_some() {
tag.push(b'O');
}
for arg in &fun.0.args {
gen_rpc_tag(ctx, arg.ty, &mut tag)?;
}
tag.push(b'|');
for arg in &fun.0.args {
let name_string = arg.name.to_string();
let name_bytes = name_string.as_bytes();
if name_bytes.len() > 255 {
return Err(format!("Parameter name too long: '{}'", arg.name));
}
tag.push(name_bytes.len() as u8);
tag.extend_from_slice(name_bytes);
}
tag.push(b':');
gen_rpc_tag(ctx, fun.0.ret, &mut tag)?;

let marker = b'K';
if obj.is_some() {
tag.insert(1, marker);
} else {
tag.insert(0, marker);
}
println!("Constructed RPC tag: {tag:?}");
io::stdout().flush().unwrap();

let mut hasher = DefaultHasher::new();
tag.hash(&mut hasher);
let hash = format!("{}", hasher.finish());

let tag_ptr = ctx
.module
.get_global(&hash)
.get_global(hash.as_str())
.unwrap_or_else(|| {
let tag_arr_ptr = ctx.module.add_global(
int8.array_type(tag.len() as u32),
None,
&format!("tag_array_{hash}"),
format!("tagptr{}", fun.1.0).as_str(),
);
tag_arr_ptr.set_initializer(&int8.const_array(
&tag.iter().map(|&b| int8.const_int(u64::from(b), false)).collect::<Vec<_>>(),
&tag.iter().map(|v| int8.const_int(u64::from(*v), false)).collect::<Vec<_>>(),
));
tag_arr_ptr.set_linkage(Linkage::Private);
let tag_ptr = ctx.module.add_global(tag_ptr_type, None, &hash);
@@ -940,6 +878,7 @@ fn rpc_codegen_callback_fn<'ctx>(
.as_pointer_value();

let arg_length = args.len() + usize::from(obj.is_some());

let stackptr = call_stacksave(ctx, Some("rpc.stack"));
let args_ptr = ctx
.builder
@@ -950,53 +889,39 @@ fn rpc_codegen_callback_fn<'ctx>(
)
.unwrap();

// -- rpc args handling
let mut keys = fun.0.args.clone();
let mut mapping = HashMap::new();
for (maybe_key, value) in args {
let key_str = if let Some(k) = maybe_key {
let s = k.to_string();
keys.retain(|p| p.name.to_string() != s);
s
} else {
let removed = keys.remove(0).name.to_string();
removed
};
mapping.insert(key_str, value);
for (key, value) in args {
mapping.insert(key.unwrap_or_else(|| keys.remove(0).name), value);
}
// default value handling
for k in keys {
let key_str = k.name.to_string();
if let Some(default_val) = k.default_value.as_ref() {
mapping.insert(key_str, ctx.gen_symbol_val(generator, default_val, k.ty).into());
} else {
return Err(format!(
"No argument provided for parameter '{}' and no default value exists",
k.name
));
}
}
let mut real_params = Vec::new();
for arg in &fun.0.args {
let key_str = arg.name.to_string();
let value = if let Some(val) = mapping.remove(&key_str) {
val
} else if let Some(default_val) = arg.default_value.as_ref() {
ctx.gen_symbol_val(generator, default_val, arg.ty).into()
} else {
return Err(format!(
"No argument provided for parameter '{}' and no default value exists",
arg.name
));
};
let llvm_val = value.to_basic_value_enum(ctx, generator, arg.ty)?;
real_params.push((llvm_val, arg.ty));
mapping
.insert(k.name, ctx.gen_symbol_val(generator, &k.default_value.unwrap(), k.ty).into());
}
// reorder the parameters
let mut real_params = fun
.0
.args
.iter()
.map(|arg| {
mapping
.remove(&arg.name)
.unwrap()
.to_basic_value_enum(ctx, generator, arg.ty)
.map(|llvm_val| (llvm_val, arg.ty))
})
.collect::<Result<Vec<(_, _)>, _>>()?;
if let Some(obj) = obj {
if let ValueEnum::Static(obj_val) = obj.1 {
real_params.insert(0, (obj_val.get_const_obj(ctx, generator), obj.0));
} else {
return Err("Only host objects are allowed for 'self'".into());
// should be an error here...
panic!("only host object is allowed");
}
}

for (i, (arg, arg_ty)) in real_params.iter().enumerate() {
let arg_slot = format_rpc_arg(generator, ctx, (*arg, *arg_ty, i));
let arg_ptr = unsafe {
@@ -1009,6 +934,8 @@ fn rpc_codegen_callback_fn<'ctx>(
.unwrap();
ctx.builder.build_store(arg_ptr, arg_slot).unwrap();
}

// call
infer_and_call_function(
ctx,
if is_async { "rpc_send_async" } else { "rpc_send" },
@@ -1018,15 +945,20 @@ fn rpc_codegen_callback_fn<'ctx>(
None,
);

// reclaim stack space used by arguments
call_stackrestore(ctx, stackptr);

if is_async {
// async RPCs do not return any values
Ok(None)
} else {
let result = format_rpc_ret(generator, ctx, fun.0.ret, is_async);
let result = format_rpc_ret(generator, ctx, fun.0.ret);

if !result.is_some_and(|res| res.get_type().is_pointer_type()) {
// An RPC returning an NDArray would not touch here.
call_stackrestore(ctx, stackptr);
}

Ok(result)
}
}
@@ -1039,7 +971,7 @@ pub fn attributes_writeback<'ctx>(
return_obj: Option<(Type, ValueEnum<'ctx>)>,
) -> Result<(), String> {
Python::with_gil(|py| -> PyResult<Result<(), String>> {
let host_attributes: &PyList = host_attributes.downcast(py)?;
let host_attributes = host_attributes.downcast_bound::<PyList>(py)?;
let top_levels = ctx.top_level.definitions.read();
let globals = inner_resolver.global_value_ids.read();
let int32 = ctx.ctx.i32_type();
@@ -1052,7 +984,7 @@ pub fn attributes_writeback<'ctx>(
}

for val in (*globals).values() {
let val = val.as_ref(py);
let val = val.bind(py);
let ty = inner_resolver.get_obj_type(
py,
val,
@@ -1236,7 +1168,7 @@ fn polymorphic_print<'ctx>(
if as_rtio { "rtio_log" } else { "core_log" },
if as_rtio { None } else { Some(llvm_i32.into()) },
&[llvm_pi8.into()],
&once(fmt.into()).chain(args).map(BasicValueEnum::into).collect_vec(),
&once(fmt.into()).chain(args).collect_vec(),
true,
None,
None,
@@ -1,7 +1,6 @@
#![deny(future_incompatible, let_underscore, nonstandard_style, clippy::all)]
#![warn(clippy::pedantic)]
#![allow(
unsafe_op_in_unsafe_fn,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::enum_glob_use,
@@ -23,7 +22,7 @@ use indexmap::IndexMap;
use itertools::Itertools;
use parking_lot::{Mutex, RwLock};
use pyo3::{
create_exception, exceptions,
IntoPyObjectExt, create_exception, exceptions,
prelude::*,
types::{PyBytes, PyDict, PyNone, PySet},
};
@@ -31,17 +30,17 @@ use tempfile::{self, TempDir};

use nac3core::{
codegen::{
concrete_type::ConcreteTypeStore, gen_func_impl, irrt::load_irrt, CodeGenLLVMOptions,
CodeGenTargetMachineOptions, CodeGenTask, CodeGenerator, WithCall, WorkerRegistry,
CodeGenLLVMOptions, CodeGenTargetMachineOptions, CodeGenTask, CodeGenerator, WithCall,
WorkerRegistry, concrete_type::ConcreteTypeStore, gen_func_impl, irrt::load_irrt,
},
inkwell::{
OptimizationLevel,
context::Context,
memory_buffer::MemoryBuffer,
module::{FlagBehavior, Linkage, Module},
passes::PassBuilderOptions,
support::is_multithreaded,
targets::*,
OptimizationLevel,
},
nac3parser::{
ast::{self, Constant, ExprKind, Located, Stmt, StmtKind, StrRef},
@@ -49,19 +48,19 @@ use nac3core::{
},
symbol_resolver::SymbolResolver,
toplevel::{
DefinitionId, GenCall, TopLevelDef,
builtins::get_exn_constructor,
composer::{BuiltinFuncCreator, BuiltinFuncSpec, ComposerConfig, TopLevelComposer},
DefinitionId, GenCall, TopLevelDef,
},
typecheck::{
type_inferencer::PrimitiveStore,
typedef::{into_var_map, FunSignature, FuncArg, Type, TypeEnum, Unifier, VarMap},
typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier, VarMap, into_var_map},
},
};
use nac3ld::Linker;

use codegen::{
attributes_writeback, gen_core_log, gen_rtio_log, rpc_codegen_callback, ArtiqCodeGenerator,
ArtiqCodeGenerator, attributes_writeback, gen_core_log, gen_rtio_log, rpc_codegen_callback,
};
use symbol_resolver::{DeferredEvaluationStore, InnerResolver, PythonHelper, Resolver};
use timeline::TimeFns;
@@ -170,7 +169,10 @@ pub struct SpecialPythonId {
sequential: u64,
}

type TopLevelComponent = (Stmt, String, PyObject);
/// An [`IndexMap`] storing the `id()` of values, mapped to a handle of the value itself.
type PyValueMap = IndexMap<u64, Arc<PyObject>>;

type TopLevelComponent = (Stmt, String, Arc<PyObject>);

// TopLevelComposer is unsendable as it holds the unification table, which is
// unsendable due to Rc. Arc would cause a performance hit.
@@ -197,17 +199,17 @@ create_exception!(nac3artiq, CompileError, exceptions::PyException);
impl Nac3 {
fn register_module(
&mut self,
module: &PyObject,
module: &Arc<PyObject>,
registered_class_ids: &HashSet<u64>,
) -> PyResult<()> {
let (module_name, source_file, source) =
Python::with_gil(|py| -> PyResult<(String, String, String)> {
let module: &PyAny = module.extract(py)?;
let module = module.bind(py);
let source_file = module.getattr("__file__");
let (source_file, source) = if let Ok(source_file) = source_file {
let source_file = source_file.extract()?;
let source_file = source_file.extract::<&str>()?;
(
source_file,
source_file.to_string(),
fs::read_to_string(source_file).map_err(|e| {
exceptions::PyIOError::new_err(format!(
"failed to read input file: {e}"
@@ -217,13 +219,10 @@ impl Nac3 {
} else {
// kernels submitted by content have no file
// but still can provide source by StringLoader
let get_src_fn = module
.getattr("__loader__")?
.extract::<PyObject>()?
.getattr(py, "get_source")?;
("<expcontent>", get_src_fn.call1(py, (PyNone::get(py),))?.extract(py)?)
let get_src_fn = module.getattr("__loader__")?.getattr("get_source")?;
(String::from("<expcontent>"), get_src_fn.call1((PyNone::get(py),))?.extract()?)
};
Ok((module.getattr("__name__")?.extract()?, source_file.to_string(), source))
Ok((module.getattr("__name__")?.extract()?, source_file, source))
})?;

let parser_result = parse_program(&source, source_file.into())
@@ -252,7 +251,7 @@ impl Nac3 {
Ok(true)
} else {
let base_obj =
module.getattr(py, id.to_string().as_str())?;
module.bind(py).getattr(id.to_string().as_str())?;
let base_id = id_fn.call1((base_obj,))?.extract()?;
Ok(registered_class_ids.contains(&base_id))
}
@@ -322,7 +321,7 @@ impl Nac3 {
None => {
return Some(format!(
"object launching kernel does not have method `{method_name}`"
))
));
}
}
} else {
@@ -343,7 +342,7 @@ impl Nac3 {
None if default_value.is_none() => {
return Some(format!(
"argument `{name}` not provided when launching kernel function"
))
));
}
_ => break,
};
@@ -357,7 +356,7 @@ impl Nac3 {
Err(e) => {
return Some(format!(
"type error ({e}) at parameter #{i} when calling kernel function"
))
));
}
};
if let Err(e) = unifier.unify(in_ty, *ty) {
@@ -431,13 +430,13 @@ impl Nac3 {
]
}

fn compile_method<T>(
fn compile_method<'py, T>(
&self,
obj: &PyAny,
obj: &Bound<'py, PyAny>,
method_name: &str,
args: Vec<&PyAny>,
embedding_map: &PyAny,
py: Python,
args: Vec<Bound<'py, PyAny>>,
embedding_map: &Bound<'py, PyAny>,
py: Python<'py>,
link_fn: &dyn Fn(&Module) -> PyResult<T>,
) -> PyResult<T> {
let size_t = self.isa.get_size_type(&Context::create());
@@ -453,19 +452,20 @@ impl Nac3 {
let id_fn = builtins.getattr("id")?;
let issubclass = builtins.getattr("issubclass")?;
let exn_class = builtins.getattr("Exception")?;
let store_obj = embedding_map.getattr("store_object").unwrap().to_object(py);
let store_str = embedding_map.getattr("store_str").unwrap().to_object(py);
let store_fun = embedding_map.getattr("store_function").unwrap().to_object(py);
let host_attributes = embedding_map.getattr("attributes_writeback").unwrap().to_object(py);
let store_obj = embedding_map.getattr("store_object").unwrap();
let store_str = embedding_map.getattr("store_str").unwrap();
let store_fun = embedding_map.getattr("store_function").unwrap().into_py_any(py)?;
let host_attributes =
embedding_map.getattr("attributes_writeback").unwrap().into_py_any(py)?;
let global_value_ids: Arc<RwLock<HashMap<_, _>>> = Arc::new(RwLock::new(HashMap::new()));
let helper = PythonHelper {
id_fn: builtins.getattr("id").unwrap().to_object(py),
len_fn: builtins.getattr("len").unwrap().to_object(py),
type_fn: builtins.getattr("type").unwrap().to_object(py),
origin_ty_fn: typings.getattr("get_origin").unwrap().to_object(py),
args_ty_fn: typings.getattr("get_args").unwrap().to_object(py),
store_obj: store_obj.clone(),
store_str,
id_fn: Arc::new(builtins.getattr("id").unwrap().into_py_any(py)?),
len_fn: Arc::new(builtins.getattr("len").unwrap().into_py_any(py)?),
type_fn: Arc::new(builtins.getattr("type").unwrap().into_py_any(py)?),
origin_ty_fn: Arc::new(typings.getattr("get_origin").unwrap().into_py_any(py)?),
args_ty_fn: Arc::new(typings.getattr("get_args").unwrap().into_py_any(py)?),
store_obj: Arc::new(store_obj.clone().into_py_any(py)?),
store_str: Arc::new(store_str.into_py_any(py)?),
};

let pyid_to_type = Arc::new(RwLock::new(HashMap::<u64, Type>::new()));
@@ -488,14 +488,14 @@ impl Nac3 {

let mut rpc_ids = vec![];
for (stmt, path, module) in &self.top_levels {
let py_module: &PyAny = module.extract(py)?;
let py_module = module.bind(py);
let module_id: u64 = id_fn.call1((py_module,))?.extract()?;
let module_name: String = py_module.getattr("__name__")?.extract()?;
let helper = helper.clone();
let class_obj;
if let StmtKind::ClassDef { name, .. } = &stmt.node {
let class = py_module.getattr(name.to_string().as_str()).unwrap();
if issubclass.call1((class, exn_class)).unwrap().extract().unwrap()
if issubclass.call1((&class, &exn_class)).unwrap().extract().unwrap()
&& class.getattr("artiq_builtin").is_err()
{
class_obj = Some(class);
@@ -508,8 +508,8 @@ impl Nac3 {
let (name_to_pyid, resolver, _, _) =
module_to_resolver_cache.get(&module_id).cloned().unwrap_or_else(|| {
let mut name_to_pyid: HashMap<StrRef, u64> = HashMap::new();
let members: &PyDict =
py_module.getattr("__dict__").unwrap().downcast().unwrap();
let members = py_module.getattr("__dict__").unwrap();
let members = members.downcast::<PyDict>().unwrap();
for (key, val) in members {
let key: &str = key.extract().unwrap();
let val = id_fn.call1((val,)).unwrap().extract().unwrap();
@@ -555,7 +555,7 @@ impl Nac3 {
if let Some(class_obj) = class_obj {
self.exception_ids
.write()
.insert(def_id.0, store_obj.call1(py, (class_obj,))?.extract(py)?);
.insert(def_id.0, store_obj.call1((class_obj,))?.extract()?);
}

match &stmt.node {
@@ -567,7 +567,7 @@ impl Nac3 {
.call1(
py,
(
def_id.0.into_py(py),
def_id.0.into_py_any(py)?,
module.getattr(py, name.to_string().as_str()).unwrap(),
),
)
@@ -583,16 +583,17 @@ impl Nac3 {
&& decorator_str != "extern"
{
return Err(CompileError::new_err(format!(
"compilation failed\n----------\nDecorator {} is not supported (at {})",
decorator_id_string(decorator).unwrap(), stmt.location
)));
"compilation failed\n----------\nDecorator {} is not supported (at {})",
decorator_id_string(decorator).unwrap(),
stmt.location
)));
}
}
}
}
StmtKind::ClassDef { name, body, .. } => {
let class_name = name.to_string();
let class_obj = module.getattr(py, class_name.as_str()).unwrap();
let class_obj = Arc::new(module.getattr(py, class_name.as_str()).unwrap());
for stmt in body {
if let StmtKind::FunctionDef { name, decorator_list, .. } = &stmt.node {
for decorator in decorator_list {
@@ -619,7 +620,8 @@ impl Nac3 {
{
return Err(CompileError::new_err(format!(
"compilation failed\n----------\nDecorator {} is not supported (at {})",
decorator_id_string(decorator).unwrap(), stmt.location
decorator_id_string(decorator).unwrap(),
stmt.location
)));
}
}
@@ -666,7 +668,7 @@ impl Nac3 {
let mut arg_names = vec![];
for (i, arg) in args.into_iter().enumerate() {
let name = format!("tmp{i}");
module.add(&name, arg)?;
module.add(&*name, &arg)?;
name_to_pyid.insert(name.clone().into(), id_fun.call1((arg,))?.extract()?);
arg_names.push(name);
}
@@ -688,7 +690,7 @@ impl Nac3 {
id_to_primitive: RwLock::default(),
field_to_val: RwLock::default(),
name_to_pyid,
module: module.to_object(py),
module: Arc::new(module.into_py_any(py)?),
helper: helper.clone(),
string_store: self.string_store.clone(),
exception_ids: self.exception_ids.clone(),
@@ -762,7 +764,7 @@ impl Nac3 {
.call1(
py,
(
id.0.into_py(py),
id.0.into_py_any(py)?,
class_def
.getattr(py, name.to_string().as_str())
.unwrap(),
@@ -775,7 +777,7 @@ impl Nac3 {
TopLevelDef::Variable { .. } => {
return Err(CompileError::new_err(String::from(
"Unsupported @rpc annotation on global variable",
)))
)));
}
TopLevelDef::Module { .. } => {
unreachable!("Type module cannot be decorated with @rpc")
@@ -953,7 +955,8 @@ impl Nac3 {
let mut string_store_vec = string_store.iter().collect::<Vec<_>>();
string_store_vec.sort_by(|(_s1, key1), (_s2, key2)| key1.cmp(key2));
for (s, key) in string_store_vec {
let embed_key: i32 = helper.store_str.call1(py, (s,)).unwrap().extract(py).unwrap();
let embed_key: i32 =
helper.store_str.bind(py).call1((s,)).unwrap().extract().unwrap();
assert_eq!(
embed_key, *key,
"string {s} is out of sync between embedding map (key={embed_key}) and \
@@ -1063,7 +1066,7 @@ fn add_exceptions(
#[pymethods]
impl Nac3 {
#[new]
fn new(isa: &str, artiq_builtins: &PyDict, py: Python) -> PyResult<Self> {
fn new<'py>(isa: &str, artiq_builtins: &Bound<'py, PyDict>, py: Python<'py>) -> PyResult<Self> {
let isa = match isa {
"host" => Isa::Host,
"rv32g" => Isa::RiscV32G,
@@ -1134,38 +1137,38 @@ impl Nac3 {
let typing_mod = PyModule::import(py, "typing").unwrap();
let types_mod = PyModule::import(py, "types").unwrap();

let get_id = |x: &PyAny| id_fn.call1((x,)).and_then(PyAny::extract).unwrap();
let get_attr_id = |obj: &PyModule, attr| {
let get_id = |x: &Bound<PyAny>| id_fn.call1((x,)).and_then(|id| id.extract()).unwrap();
let get_attr_id = |obj: &Bound<PyModule>, attr| {
id_fn.call1((obj.getattr(attr).unwrap(),)).unwrap().extract().unwrap()
};
let primitive_ids = PrimitivePythonId {
virtual_id: get_id(artiq_builtins.get_item("virtual").ok().flatten().unwrap()),
virtual_id: get_id(&artiq_builtins.get_item("virtual").ok().flatten().unwrap()),
generic_alias: (
get_attr_id(typing_mod, "_GenericAlias"),
get_attr_id(types_mod, "GenericAlias"),
get_attr_id(&typing_mod, "_GenericAlias"),
get_attr_id(&types_mod, "GenericAlias"),
),
none: get_id(artiq_builtins.get_item("none").ok().flatten().unwrap()),
typevar: get_attr_id(typing_mod, "TypeVar"),
none: get_id(&artiq_builtins.get_item("none").ok().flatten().unwrap()),
typevar: get_attr_id(&typing_mod, "TypeVar"),
const_generic_marker: get_id(
artiq_builtins.get_item("_ConstGenericMarker").ok().flatten().unwrap(),
&artiq_builtins.get_item("_ConstGenericMarker").ok().flatten().unwrap(),
),
int: get_attr_id(builtins_mod, "int"),
int32: get_attr_id(numpy_mod, "int32"),
int64: get_attr_id(numpy_mod, "int64"),
uint32: get_attr_id(numpy_mod, "uint32"),
uint64: get_attr_id(numpy_mod, "uint64"),
bool: get_attr_id(builtins_mod, "bool"),
np_bool_: get_attr_id(numpy_mod, "bool_"),
string: get_attr_id(builtins_mod, "str"),
np_str_: get_attr_id(numpy_mod, "str_"),
float: get_attr_id(builtins_mod, "float"),
float64: get_attr_id(numpy_mod, "float64"),
list: get_attr_id(builtins_mod, "list"),
ndarray: get_attr_id(numpy_mod, "ndarray"),
tuple: get_attr_id(builtins_mod, "tuple"),
exception: get_attr_id(builtins_mod, "Exception"),
option: get_id(artiq_builtins.get_item("Option").ok().flatten().unwrap()),
module: get_attr_id(types_mod, "ModuleType"),
int: get_attr_id(&builtins_mod, "int"),
int32: get_attr_id(&numpy_mod, "int32"),
int64: get_attr_id(&numpy_mod, "int64"),
uint32: get_attr_id(&numpy_mod, "uint32"),
uint64: get_attr_id(&numpy_mod, "uint64"),
bool: get_attr_id(&builtins_mod, "bool"),
np_bool_: get_attr_id(&numpy_mod, "bool_"),
string: get_attr_id(&builtins_mod, "str"),
np_str_: get_attr_id(&numpy_mod, "str_"),
float: get_attr_id(&builtins_mod, "float"),
float64: get_attr_id(&numpy_mod, "float64"),
list: get_attr_id(&builtins_mod, "list"),
ndarray: get_attr_id(&numpy_mod, "ndarray"),
tuple: get_attr_id(&builtins_mod, "tuple"),
exception: get_attr_id(&builtins_mod, "Exception"),
option: get_id(&artiq_builtins.get_item("Option").ok().flatten().unwrap()),
module: get_attr_id(&types_mod, "ModuleType"),
};

let working_directory = tempfile::Builder::new().prefix("nac3-").tempdir().unwrap();
@@ -1233,37 +1236,45 @@ impl Nac3 {
})
}

fn analyze(
fn analyze<'py>(
&mut self,
functions: &PySet,
classes: &PySet,
special_ids: &PyDict,
content_modules: &PySet,
functions: &Bound<'py, PySet>,
classes: &Bound<'py, PySet>,
special_ids: &Bound<'py, PyDict>,
content_modules: &Bound<'py, PySet>,
) -> PyResult<()> {
let (modules, class_ids) =
Python::with_gil(|py| -> PyResult<(IndexMap<u64, PyObject>, HashSet<u64>)> {
let mut modules: IndexMap<u64, PyObject> = IndexMap::new();
Python::with_gil(|py| -> PyResult<(PyValueMap, HashSet<u64>)> {
let mut modules: IndexMap<u64, Arc<PyObject>> = IndexMap::new();
let mut class_ids: HashSet<u64> = HashSet::new();

let id_fn = PyModule::import(py, "builtins")?.getattr("id")?;
let getmodule_fn = PyModule::import(py, "inspect")?.getattr("getmodule")?;

for function in functions {
let module: PyObject = getmodule_fn.call1((function,))?.extract()?;
if !module.is_none(py) {
modules.insert(id_fn.call1((&module,))?.extract()?, module);
let module = getmodule_fn.call1((&function,))?;
if !module.is_none() {
modules.insert(
id_fn.call1((&module,))?.extract()?,
Arc::new(module.into_py_any(py)?),
);
}
}
for class in classes {
let module: PyObject = getmodule_fn.call1((class,))?.extract()?;
if !module.is_none(py) {
modules.insert(id_fn.call1((&module,))?.extract()?, module);
let module = getmodule_fn.call1((&class,))?;
if !module.is_none() {
modules.insert(
id_fn.call1((&module,))?.extract()?,
Arc::new(module.into_py_any(py)?),
);
}
class_ids.insert(id_fn.call1((class,))?.extract()?);
class_ids.insert(id_fn.call1((&class,))?.extract()?);
}
for module in content_modules {
let module: PyObject = module.extract()?;
modules.insert(id_fn.call1((&module,))?.extract()?, module);
modules.insert(
id_fn.call1((&module,))?.extract()?,
Arc::new(module.into_py_any(py)?),
);
}
Ok((modules, class_ids))
})?;
@@ -1293,14 +1304,14 @@ impl Nac3 {
Ok(())
}

fn compile_method_to_file(
fn compile_method_to_file<'py>(
&mut self,
obj: &PyAny,
obj: &Bound<'py, PyAny>,
method_name: &str,
args: Vec<&PyAny>,
args: Vec<Bound<'py, PyAny>>,
filename: &str,
embedding_map: &PyAny,
py: Python,
embedding_map: &Bound<'py, PyAny>,
py: Python<'py>,
) -> PyResult<()> {
let target_machine = self.get_llvm_target_machine();

@@ -1339,13 +1350,13 @@ impl Nac3 {
}
}

fn compile_method_to_mem(
fn compile_method_to_mem<'py>(
&mut self,
obj: &PyAny,
obj: &Bound<'py, PyAny>,
method_name: &str,
args: Vec<&PyAny>,
embedding_map: &PyAny,
py: Python,
args: Vec<Bound<'py, PyAny>>,
embedding_map: &Bound<'py, PyAny>,
py: Python<'py>,
) -> PyResult<PyObject> {
let target_machine = self.get_llvm_target_machine();

@@ -1385,12 +1396,12 @@ impl Nac3 {
}

#[cfg(feature = "init-llvm-profile")]
extern "C" {
unsafe extern "C" {
fn __llvm_profile_initialize();
}

#[pymodule]
fn nac3artiq(py: Python, m: &PyModule) -> PyResult<()> {
fn nac3artiq<'py>(py: Python<'py>, m: &Bound<'py, PyModule>) -> PyResult<()> {
#[cfg(feature = "init-llvm-profile")]
unsafe {
__llvm_profile_initialize();
@@ -1,44 +1,46 @@
use std::{
collections::{HashMap, HashSet},
sync::{
atomic::{AtomicBool, Ordering::Relaxed},
Arc,
atomic::{AtomicBool, Ordering::Relaxed},
},
};

use itertools::Itertools;
use parking_lot::RwLock;
use pyo3::{
IntoPyObjectExt, PyAny, PyErr, PyObject, PyResult, Python,
prelude::*,
types::{PyDict, PyTuple},
PyAny, PyErr, PyObject, PyResult, Python,
};

use super::PrimitivePythonId;
use nac3core::{
codegen::{
types::{ndarray::NDArrayType, structure::StructProxyType, ProxyType},
values::ndarray::make_contiguous_strides,
CodeGenContext, CodeGenerator,
types::{ProxyType, ndarray::NDArrayType, structure::StructProxyType},
values::ndarray::make_contiguous_strides,
},
inkwell::{
AddressSpace,
module::Linkage,
types::{BasicType, BasicTypeEnum},
values::{BasicValue, BasicValueEnum},
AddressSpace,
},
nac3parser::ast::{self, StrRef},
symbol_resolver::{StaticValue, SymbolResolver, SymbolValue, ValueEnum},
toplevel::{
DefinitionId, TopLevelDef,
helper::PrimDef,
numpy::{make_ndarray_ty, unpack_ndarray_var_tys},
DefinitionId, TopLevelDef,
},
typecheck::{
type_inferencer::PrimitiveStore,
typedef::{into_var_map, iter_type_vars, Type, TypeEnum, TypeVar, Unifier, VarMap},
typedef::{Type, TypeEnum, TypeVar, Unifier, VarMap, into_var_map, iter_type_vars},
},
};

use super::PrimitivePythonId;

pub enum PrimitiveValue {
I32(i32),
I64(i64),
@@ -72,15 +74,16 @@ impl DeferredEvaluationStore {
/// A class field as stored in the [`InnerResolver`], represented by the ID and name of the
/// associated [`PythonValue`].
type ResolverField = (u64, StrRef);
/// A class field as stored in Python, represented by the `id()` and [`PyObject`] of the field.
type PyFieldHandle = (u64, PyObject);

/// A value as stored in Python, represented by the `id()` and [`PyObject`] of the value.
type PyValueHandle = (u64, Arc<PyObject>);

pub struct InnerResolver {
pub id_to_type: RwLock<HashMap<StrRef, Type>>,
pub id_to_def: RwLock<HashMap<StrRef, DefinitionId>>,
pub id_to_pyval: RwLock<HashMap<StrRef, (u64, PyObject)>>,
pub id_to_pyval: RwLock<HashMap<StrRef, PyValueHandle>>,
pub id_to_primitive: RwLock<HashMap<u64, PrimitiveValue>>,
pub field_to_val: RwLock<HashMap<ResolverField, Option<PyFieldHandle>>>,
pub field_to_val: RwLock<HashMap<ResolverField, Option<PyValueHandle>>>,
pub global_value_ids: Arc<RwLock<HashMap<u64, PyObject>>>,
pub pyid_to_def: Arc<RwLock<HashMap<u64, DefinitionId>>>,
pub pyid_to_type: Arc<RwLock<HashMap<u64, Type>>>,
@@ -91,26 +94,26 @@ pub struct InnerResolver {
pub deferred_eval_store: DeferredEvaluationStore,
// module specific
pub name_to_pyid: HashMap<StrRef, u64>,
pub module: PyObject,
pub module: Arc<PyObject>,
}

pub struct Resolver(pub Arc<InnerResolver>);

#[derive(Clone)]
pub struct PythonHelper {
pub type_fn: PyObject,
pub len_fn: PyObject,
pub id_fn: PyObject,
pub origin_ty_fn: PyObject,
pub args_ty_fn: PyObject,
pub store_obj: PyObject,
pub store_str: PyObject,
pub type_fn: Arc<PyObject>,
pub len_fn: Arc<PyObject>,
pub id_fn: Arc<PyObject>,
pub origin_ty_fn: Arc<PyObject>,
pub args_ty_fn: Arc<PyObject>,
pub store_obj: Arc<PyObject>,
pub store_str: Arc<PyObject>,
}

struct PythonValue {
id: u64,
value: PyObject,
store_obj: PyObject,
value: Arc<PyObject>,
store_obj: Arc<PyObject>,
resolver: Arc<InnerResolver>,
}

@@ -127,7 +130,7 @@ impl StaticValue for PythonValue {
ctx.module.get_global(format!("{}_const", self.id).as_str()).map_or_else(
|| {
Python::with_gil(|py| -> PyResult<BasicValueEnum<'ctx>> {
let id: u32 = self.store_obj.call1(py, (self.value.clone(),))?.extract(py)?;
let id: u32 = self.store_obj.bind(py).call1((&*self.value,))?.extract()?;
let struct_type = ctx.ctx.struct_type(&[ctx.ctx.i32_type().into()], false);
let global = ctx.module.add_global(
struct_type,
@@ -176,7 +179,7 @@ impl StaticValue for PythonValue {

Python::with_gil(|py| -> PyResult<BasicValueEnum<'ctx>> {
self.resolver
.get_obj_value(py, self.value.as_ref(py), ctx, generator, expected_ty)
.get_obj_value(py, (*self.value).bind(py), ctx, generator, expected_ty)
.map(Option::unwrap)
})
.map_err(|e| e.to_string())
@@ -192,14 +195,14 @@ impl StaticValue for PythonValue {
field_to_val.get(&(self.id, name)).cloned()
}
.unwrap_or_else(|| {
Python::with_gil(|py| -> PyResult<Option<(u64, PyObject)>> {
Python::with_gil(|py| -> PyResult<Option<PyValueHandle>> {
let helper = &self.resolver.helper;
let ty = helper.type_fn.call1(py, (&self.value,))?;
let ty_id: u64 = helper.id_fn.call1(py, (ty,))?.extract(py)?;
let ty = helper.type_fn.bind(py).call1((&*self.value,))?;
let ty_id: u64 = helper.id_fn.bind(py).call1((ty,))?.extract()?;
// for optimizing unwrap KernelInvariant
if ty_id == self.resolver.primitive_ids.option && name == "_nac3_option".into() {
let obj = self.value.getattr(py, name.to_string().as_str())?;
let id = self.resolver.helper.id_fn.call1(py, (&obj,))?.extract(py)?;
let obj = Arc::new(self.value.getattr(py, name.to_string().as_str())?);
let id = self.resolver.helper.id_fn.bind(py).call1((&*obj,))?.extract()?;
return if self.id == self.resolver.primitive_ids.none {
Ok(None)
} else {
@@ -220,8 +223,8 @@ impl StaticValue for PythonValue {
let result = if mutable {
None
} else {
let obj = self.value.getattr(py, name.to_string().as_str())?;
let id = self.resolver.helper.id_fn.call1(py, (&obj,))?.extract(py)?;
let obj = Arc::new(self.value.getattr(py, name.to_string().as_str())?);
let id = self.resolver.helper.id_fn.bind(py).call1((&*obj,))?.extract()?;
Some((id, obj))
};
self.resolver.field_to_val.write().insert((self.id, name), result.clone());
@@ -230,25 +233,27 @@ impl StaticValue for PythonValue {
.unwrap()
})
.map(|(id, obj)| {
ValueEnum::Static(Arc::new(PythonValue {
id,
value: obj,
store_obj: self.store_obj.clone(),
resolver: self.resolver.clone(),
}))
Python::with_gil(|_| {
ValueEnum::Static(Arc::new(PythonValue {
id,
value: obj,
store_obj: self.store_obj.clone(),
resolver: self.resolver.clone(),
}))
})
})
}

fn get_tuple_element<'ctx>(&self, index: u32) -> Option<ValueEnum<'ctx>> {
Python::with_gil(|py| -> PyResult<Option<(u64, PyObject)>> {
Python::with_gil(|py| -> PyResult<Option<PyValueHandle>> {
let helper = &self.resolver.helper;
let ty = helper.type_fn.call1(py, (&self.value,))?;
let ty_id: u64 = helper.id_fn.call1(py, (ty,))?.extract(py)?;
let ty = helper.type_fn.bind(py).call1((&*self.value,))?;
let ty_id: u64 = helper.id_fn.bind(py).call1((ty,))?.extract()?;
assert_eq!(ty_id, self.resolver.primitive_ids.tuple);
let tup: &PyTuple = self.value.extract(py)?;
let elem = tup.get_item(index as usize)?;
let id = self.resolver.helper.id_fn.call1(py, (elem,))?.extract(py)?;
Ok(Some((id, elem.into())))
let tup = self.value.bind(py).downcast::<PyTuple>()?;
let elem = Arc::new(tup.get_item(index as usize)?.into_py_any(py)?);
let id = self.resolver.helper.id_fn.bind(py).call1((&*elem,))?.extract()?;
Ok(Some((id, elem)))
})
.unwrap()
.map(|(id, obj)| {
@@ -263,23 +268,23 @@ impl StaticValue for PythonValue {
}

impl InnerResolver {
fn get_list_elem_type(
fn get_list_elem_type<'py>(
&self,
py: Python,
list: &PyAny,
py: Python<'py>,
list: &Bound<'py, PyAny>,
len: usize,
unifier: &mut Unifier,
defs: &[Arc<RwLock<TopLevelDef>>],
primitives: &PrimitiveStore,
) -> PyResult<Result<Type, String>> {
let mut ty = match self.get_obj_type(py, list.get_item(0)?, unifier, defs, primitives)? {
let mut ty = match self.get_obj_type(py, &list.get_item(0)?, unifier, defs, primitives)? {
Ok(t) => t,
Err(e) => return Ok(Err(format!("type error ({e}) at element #0 of the list"))),
};
for i in 1..len {
let b = match list
.get_item(i)
.map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))??
.map(|elem| self.get_obj_type(py, &elem, unifier, defs, primitives))??
{
Ok(t) => t,
Err(e) => return Ok(Err(format!("type error ({e}) at element #{i} of the list"))),
@@ -290,7 +295,7 @@ impl InnerResolver {
return Ok(Err(format!(
"inhomogeneous type ({}) at element #{i} of the list",
e.to_display(unifier)
)))
)));
}
};
}
@@ -303,17 +308,21 @@ impl InnerResolver {
/// `TypeVars` and `GenericAlias`(`A[int, bool]`) should use `ty_ty_id` to check.
///
/// The `bool` value returned indicates whether they are instantiated or not
fn get_pyty_obj_type(
fn get_pyty_obj_type<'py>(
&self,
py: Python,
pyty: &PyAny,
py: Python<'py>,
pyty: &Bound<'py, PyAny>,
unifier: &mut Unifier,
defs: &[Arc<RwLock<TopLevelDef>>],
primitives: &PrimitiveStore,
) -> PyResult<Result<(Type, bool), String>> {
let ty_id: u64 = self.helper.id_fn.call1(py, (pyty,))?.extract(py)?;
let ty_ty_id: u64 =
self.helper.id_fn.call1(py, (self.helper.type_fn.call1(py, (pyty,))?,))?.extract(py)?;
let ty_id: u64 = self.helper.id_fn.bind(py).call1((pyty,))?.extract()?;
let ty_ty_id: u64 = self
.helper
.id_fn
.bind(py)
.call1((self.helper.type_fn.bind(py).call1((pyty,))?,))?
.extract()?;

if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
Ok(Ok((primitives.int32, true)))
@@ -394,7 +403,8 @@ impl InnerResolver {
(unifier.add_ty(ty), false)
}))
} else if ty_ty_id == self.primitive_ids.typevar {
let name: &str = pyty.getattr("__name__").unwrap().extract().unwrap();
let name = pyty.getattr("__name__").unwrap();
let name = name.extract::<&str>().unwrap();
let (constraint_types, is_const_generic) = {
let constraints = pyty.getattr("__constraints__").unwrap();
let mut result: Vec<Type> = vec![];
@@ -402,8 +412,9 @@ impl InnerResolver {

let mut is_const_generic = false;
for i in 0usize.. {
if let Ok(constr) = constraints.get_item(i) {
let constr_id: u64 = self.helper.id_fn.call1(py, (constr,))?.extract(py)?;
if let Ok(constr) = &constraints.get_item(i) {
let constr_id: u64 =
self.helper.id_fn.bind(py).call1((constr,))?.extract()?;
if constr_id == self.primitive_ids.const_generic_marker {
is_const_generic = true;
continue;
@@ -462,24 +473,23 @@ impl InnerResolver {
} else if ty_ty_id == self.primitive_ids.generic_alias.0
|| ty_ty_id == self.primitive_ids.generic_alias.1
{
let origin = self.helper.origin_ty_fn.call1(py, (pyty,))?;
let args = self.helper.args_ty_fn.call1(py, (pyty,))?;
let args: &PyTuple = args.downcast(py)?;
let origin_ty =
match self.get_pyty_obj_type(py, origin.as_ref(py), unifier, defs, primitives)? {
Ok((ty, false)) => ty,
Ok((_, true)) => {
return Ok(Err("instantiated type does not take type parameters".into()))
}
Err(err) => return Ok(Err(err)),
};
let origin = self.helper.origin_ty_fn.bind(py).call1((pyty,))?;
let args = self.helper.args_ty_fn.bind(py).call1((pyty,))?;
let args = args.downcast::<PyTuple>()?;
let origin_ty = match self.get_pyty_obj_type(py, &origin, unifier, defs, primitives)? {
Ok((ty, false)) => ty,
Ok((_, true)) => {
return Ok(Err("instantiated type does not take type parameters".into()));
}
Err(err) => return Ok(Err(err)),
};

match &*unifier.get_ty(origin_ty) {
TypeEnum::TObj { obj_id, .. } if *obj_id == PrimDef::List.id() => {
if args.len() == 1 {
let ty = match self.get_pyty_obj_type(
py,
args.get_item(0)?,
&args.get_item(0)?,
unifier,
defs,
primitives,
@@ -524,10 +534,10 @@ impl InnerResolver {

// npt.NDArray[T] == np.ndarray[Any, np.dtype[T]]
let ndarray_dtype_pyty =
self.helper.args_ty_fn.call1(py, (args.get_item(1)?,))?;
let dtype = ndarray_dtype_pyty.downcast::<PyTuple>(py)?.get_item(0)?;
|
||||
self.helper.args_ty_fn.bind(py).call1((args.get_item(1)?,))?;
|
||||
let dtype = ndarray_dtype_pyty.downcast::<PyTuple>()?.get_item(0)?;
|
||||
|
||||
let ty = match self.get_pyty_obj_type(py, dtype, unifier, defs, primitives)? {
|
||||
let ty = match self.get_pyty_obj_type(py, &dtype, unifier, defs, primitives)? {
|
||||
Ok(ty) => ty,
|
||||
Err(err) => return Ok(Err(err)),
|
||||
};
|
||||
@ -543,7 +553,7 @@ impl InnerResolver {
|
||||
TypeEnum::TTuple { .. } => {
|
||||
let args = match args
|
||||
.iter()
|
||||
.map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
|
||||
.map(|x| self.get_pyty_obj_type(py, &x, unifier, defs, primitives))
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<_>, _>>() {
|
||||
@ -576,7 +586,7 @@ impl InnerResolver {
|
||||
}
|
||||
let args = match args
|
||||
.iter()
|
||||
.map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
|
||||
.map(|x| self.get_pyty_obj_type(py, &x, unifier, defs, primitives))
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<_>, _>>() {
|
||||
@ -603,7 +613,7 @@ impl InnerResolver {
|
||||
if args.len() == 1 {
|
||||
let ty = match self.get_pyty_obj_type(
|
||||
py,
|
||||
args.get_item(0)?,
|
||||
&args.get_item(0)?,
|
||||
unifier,
|
||||
defs,
|
||||
primitives,
|
||||
@ -634,23 +644,22 @@ impl InnerResolver {
|
||||
false,
|
||||
)))
|
||||
} else {
|
||||
let str_fn =
|
||||
pyo3::types::PyModule::import(py, "builtins").unwrap().getattr("repr").unwrap();
|
||||
let str_fn = PyModule::import(py, "builtins").unwrap().getattr("repr").unwrap();
|
||||
let str_repr: String = str_fn.call1((pyty,)).unwrap().extract().unwrap();
|
||||
Ok(Err(format!("{str_repr} is not registered with NAC3 (@nac3 decorator missing?)")))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_obj_type(
|
||||
pub fn get_obj_type<'py>(
|
||||
&self,
|
||||
py: Python,
|
||||
obj: &PyAny,
|
||||
py: Python<'py>,
|
||||
obj: &Bound<'py, PyAny>,
|
||||
unifier: &mut Unifier,
|
||||
defs: &[Arc<RwLock<TopLevelDef>>],
|
||||
primitives: &PrimitiveStore,
|
||||
) -> PyResult<Result<Type, String>> {
|
||||
let ty = self.helper.type_fn.call1(py, (obj,)).unwrap();
|
||||
let py_obj_id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let ty = self.helper.type_fn.bind(py).call1((obj,)).unwrap();
|
||||
let py_obj_id: u64 = self.helper.id_fn.bind(py).call1((obj,))?.extract()?;
|
||||
if let Some(ty) = self.pyid_to_type.read().get(&py_obj_id) {
|
||||
return Ok(Ok(*ty));
|
||||
}
|
||||
@ -675,8 +684,7 @@ impl InnerResolver {
|
||||
});
|
||||
|
||||
// check if obj is module
|
||||
if self.helper.id_fn.call1(py, (ty.clone(),))?.extract::<u64>(py)?
|
||||
== self.primitive_ids.module
|
||||
if self.helper.id_fn.bind(py).call1((&ty,))?.extract::<u64>()? == self.primitive_ids.module
|
||||
&& self.pyid_to_def.read().contains_key(&py_obj_id)
|
||||
{
|
||||
let def_id = self.pyid_to_def.read()[&py_obj_id];
|
||||
@ -691,7 +699,7 @@ impl InnerResolver {
|
||||
for (name, _) in attributes {
|
||||
let attribute_obj = obj.getattr(name.to_string().as_str())?;
|
||||
let attribute_ty =
|
||||
self.get_obj_type(py, attribute_obj, unifier, defs, primitives)?;
|
||||
self.get_obj_type(py, &attribute_obj, unifier, defs, primitives)?;
|
||||
if let Ok(attribute_ty) = attribute_ty {
|
||||
module_attributes.insert(*name, (attribute_ty, false));
|
||||
} else {
|
||||
@ -701,7 +709,7 @@ impl InnerResolver {
|
||||
|
||||
for name in methods.keys() {
|
||||
let method_obj = obj.getattr(name.to_string().as_str())?;
|
||||
let method_ty = self.get_obj_type(py, method_obj, unifier, defs, primitives)?;
|
||||
let method_ty = self.get_obj_type(py, &method_obj, unifier, defs, primitives)?;
|
||||
if let Ok(method_ty) = method_ty {
|
||||
module_attributes.insert(*name, (method_ty, true));
|
||||
} else {
|
||||
@ -729,11 +737,11 @@ impl InnerResolver {
|
||||
self.primitive_ids.generic_alias.0,
|
||||
self.primitive_ids.generic_alias.1,
|
||||
]
|
||||
.contains(&self.helper.id_fn.call1(py, (ty.clone(),))?.extract::<u64>(py)?)
|
||||
.contains(&self.helper.id_fn.bind(py).call1((&ty,))?.extract::<u64>()?)
|
||||
{
|
||||
obj
|
||||
} else {
|
||||
ty.as_ref(py)
|
||||
&ty
|
||||
}
|
||||
},
|
||||
unifier,
|
||||
@ -769,7 +777,7 @@ impl InnerResolver {
|
||||
// do the instantiation for these four types
|
||||
(TypeEnum::TObj { obj_id, params, .. }, false) if *obj_id == PrimDef::List.id() => {
|
||||
let ty = iter_type_vars(params).nth(0).unwrap().ty;
|
||||
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let len: usize = self.helper.len_fn.bind(py).call1((obj,))?.extract()?;
|
||||
if len == 0 {
|
||||
assert!(matches!(
|
||||
&*unifier.get_ty(ty),
|
||||
@ -821,7 +829,7 @@ impl InnerResolver {
|
||||
Ok(Ok(extracted_ty))
|
||||
} else {
|
||||
let dtype = obj.getattr("dtype")?.getattr("type")?;
|
||||
let dtype_ty = self.get_pyty_obj_type(py, dtype, unifier, defs, primitives)?;
|
||||
let dtype_ty = self.get_pyty_obj_type(py, &dtype, unifier, defs, primitives)?;
|
||||
match dtype_ty {
|
||||
Ok((t, _)) => match unifier.unify(ty, t) {
|
||||
Ok(()) => {
|
||||
@ -840,10 +848,10 @@ impl InnerResolver {
|
||||
}
|
||||
}
|
||||
(TypeEnum::TTuple { .. }, false) => {
|
||||
let elements: &PyTuple = obj.downcast()?;
|
||||
let elements = obj.downcast::<PyTuple>()?;
|
||||
let types: Result<Result<Vec<_>, _>, _> = elements
|
||||
.iter()
|
||||
.map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))
|
||||
.map(|elem| self.get_obj_type(py, &elem, unifier, defs, primitives))
|
||||
.collect();
|
||||
let types = types?;
|
||||
Ok(types.map(|types| {
|
||||
@ -855,11 +863,11 @@ impl InnerResolver {
|
||||
(TypeEnum::TObj { obj_id, params, .. }, false)
|
||||
if *obj_id == primitives.option.obj_id(unifier).unwrap() =>
|
||||
{
|
||||
let Ok(field_data) = obj.getattr("_nac3_option") else {
|
||||
let Ok(field_data) = &obj.getattr("_nac3_option") else {
|
||||
unreachable!("cannot be None")
|
||||
};
|
||||
// if is `none`
|
||||
let zelf_id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let zelf_id: u64 = self.helper.id_fn.bind(py).call1((obj,))?.extract()?;
|
||||
if zelf_id == self.primitive_ids.none {
|
||||
let ty_enum = unifier.get_ty_immutable(primitives.option);
|
||||
let TypeEnum::TObj { params, .. } = ty_enum.as_ref() else {
|
||||
@ -884,7 +892,7 @@ impl InnerResolver {
|
||||
Err(e) => {
|
||||
return Ok(Err(format!(
|
||||
"error when getting type of the option object ({e})"
|
||||
)))
|
||||
)));
|
||||
}
|
||||
};
|
||||
let new_var_map: VarMap = params.iter().map(|(id, _)| (*id, ty)).collect();
|
||||
@ -907,10 +915,10 @@ impl InnerResolver {
|
||||
// loop through non-function fields of the class to get the instantiated value
|
||||
for field in fields {
|
||||
let name: String = (*field.0).into();
|
||||
if let TypeEnum::TFunc(..) = &*unifier.get_ty(field.1 .0) {
|
||||
if let TypeEnum::TFunc(..) = &*unifier.get_ty(field.1.0) {
|
||||
continue;
|
||||
}
|
||||
let field_data = match obj.getattr(name.as_str()) {
|
||||
let field_data = &match obj.getattr(name.as_str()) {
|
||||
Ok(d) => d,
|
||||
Err(e) => return Ok(Err(format!("{e}"))),
|
||||
};
|
||||
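The `field.1 .0` to `field.1.0` rewrites here (and `f.1 .0`, `fun.1 .0` later) are purely lexical: the space used to be needed so that `1.0` was not tokenized as a float literal in chained tuple-field access, and the newer formatting drops it now that the chained form parses directly. A self-contained illustration (all names invented):

    fn main() {
        let field: (&str, (u32, bool)) = ("x", (7, true));
        // One access chain: second element of `field`, then its first element.
        assert_eq!(field.1.0, 7);
    }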
@ -920,10 +928,10 @@ impl InnerResolver {
|
||||
Err(e) => {
|
||||
return Ok(Err(format!(
|
||||
"error when getting type of field `{name}` ({e})"
|
||||
)))
|
||||
)));
|
||||
}
|
||||
};
|
||||
let field_ty = unifier.subst(field.1 .0, &var_map).unwrap_or(field.1 .0);
|
||||
let field_ty = unifier.subst(field.1.0, &var_map).unwrap_or(field.1.0);
|
||||
if let Err(e) = unifier.unify(ty, field_ty) {
|
||||
// field type mismatch
|
||||
return Ok(Err(format!(
|
||||
@ -954,22 +962,22 @@ impl InnerResolver {
|
||||
// check integer bounds
|
||||
if unifier.unioned(extracted_ty, primitives.int32) {
|
||||
obj.extract::<i32>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj} is not in the range of int32"))),
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of int32"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.int64) {
|
||||
obj.extract::<i64>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj} is not in the range of int64"))),
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of int64"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.uint32) {
|
||||
obj.extract::<u32>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj} is not in the range of uint32"))),
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of uint32"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.uint64) {
|
||||
obj.extract::<u64>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj} is not in the range of uint64"))),
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of uint64"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.bool) {
|
||||
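Only the error formatting changes in these range checks: the messages now use the `Debug` form `{obj:?}` of the new `&Bound<'_, PyAny>` argument. The checks themselves rely on `extract::<iN>()` failing when a Python int does not fit the target width; a minimal sketch with a made-up helper name:

    use pyo3::prelude::*;

    // Hypothetical helper: accept a Python int only if it fits in 32 signed bits.
    fn check_int32(obj: &Bound<'_, PyAny>) -> Result<i32, String> {
        obj.extract::<i32>()
            .map_err(|_| format!("{obj:?} is not in the range of int32"))
    }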
@ -978,11 +986,11 @@ impl InnerResolver {
|
||||
{
|
||||
Ok(Ok(extracted_ty))
|
||||
} else {
|
||||
Ok(Err(format!("{obj} is not in the range of bool")))
|
||||
Ok(Err(format!("{obj:?} is not in the range of bool")))
|
||||
}
|
||||
} else if unifier.unioned(extracted_ty, primitives.float) {
|
||||
obj.extract::<f64>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj} is not in the range of float64"))),
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of float64"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else {
|
||||
@ -992,17 +1000,21 @@ impl InnerResolver {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_obj_value<'ctx>(
|
||||
pub fn get_obj_value<'ctx, 'py>(
|
||||
&self,
|
||||
py: Python,
|
||||
obj: &PyAny,
|
||||
py: Python<'py>,
|
||||
obj: &Bound<'py, PyAny>,
|
||||
ctx: &mut CodeGenContext<'ctx, '_>,
|
||||
generator: &mut dyn CodeGenerator,
|
||||
expected_ty: Type,
|
||||
) -> PyResult<Option<BasicValueEnum<'ctx>>> {
|
||||
let ty_id: u64 =
|
||||
self.helper.id_fn.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?.extract(py)?;
|
||||
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let ty_id: u64 = self
|
||||
.helper
|
||||
.id_fn
|
||||
.bind(py)
|
||||
.call1((self.helper.type_fn.bind(py).call1((obj,))?,))?
|
||||
.extract()?;
|
||||
let id: u64 = self.helper.id_fn.bind(py).call1((obj,))?.extract()?;
|
||||
if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
|
||||
let val: i32 = obj.extract().unwrap();
|
||||
self.id_to_primitive.write().insert(id, PrimitiveValue::I32(val));
|
||||
@ -1042,7 +1054,7 @@ impl InnerResolver {
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
|
||||
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let len: usize = self.helper.len_fn.bind(py).call1((obj,))?.extract()?;
|
||||
let elem_ty = match ctx.unifier.get_ty_immutable(expected_ty).as_ref() {
|
||||
TypeEnum::TObj { obj_id, params, .. } if *obj_id == PrimDef::List.id() => {
|
||||
iter_type_vars(params).nth(0).unwrap().ty
|
||||
@ -1069,13 +1081,13 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
self.global_value_ids.write().insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
}
|
||||
|
||||
let arr: Result<Option<Vec<_>>, _> = (0..len)
|
||||
.map(|i| {
|
||||
obj.get_item(i).and_then(|elem| {
|
||||
self.get_obj_value(py, elem, ctx, generator, elem_ty).map_err(|e| {
|
||||
self.get_obj_value(py, &elem, ctx, generator, elem_ty).map_err(|e| {
|
||||
super::CompileError::new_err(format!("Error getting element {i}: {e}"))
|
||||
})
|
||||
})
|
||||
@ -1153,13 +1165,14 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
self.global_value_ids.write().insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
}
|
||||
|
||||
let ndims = llvm_ndarray.ndims();
|
||||
|
||||
// Obtain the shape of the ndarray
|
||||
let shape_tuple: &PyTuple = obj.getattr("shape")?.downcast()?;
|
||||
let shape_tuple = obj.getattr("shape")?;
|
||||
let shape_tuple = shape_tuple.downcast::<PyTuple>()?;
|
||||
assert_eq!(shape_tuple.len(), ndims as usize);
|
||||
|
||||
// The Rust type inferencer cannot figure this out
|
||||
@ -1168,7 +1181,7 @@ impl InnerResolver {
|
||||
.enumerate()
|
||||
.map(|(i, elem)| {
|
||||
let value = self
|
||||
.get_obj_value(py, elem, ctx, generator, ctx.primitives.usize())
|
||||
.get_obj_value(py, &elem, ctx, generator, ctx.primitives.usize())
|
||||
.map_err(|e| {
|
||||
super::CompileError::new_err(format!("Error getting element {i}: {e}"))
|
||||
})?
|
||||
@ -1205,7 +1218,7 @@ impl InnerResolver {
|
||||
.map(|i| {
|
||||
obj.getattr("flat")?.get_item(i).and_then(|elem| {
|
||||
let value = self
|
||||
.get_obj_value(py, elem, ctx, generator, ndarray_dtype)
|
||||
.get_obj_value(py, &elem, ctx, generator, ndarray_dtype)
|
||||
.map_err(|e| {
|
||||
super::CompileError::new_err(format!(
|
||||
"Error getting element {i}: {e}"
|
||||
@ -1338,14 +1351,14 @@ impl InnerResolver {
|
||||
};
|
||||
|
||||
let tup_tys = ty.iter();
|
||||
let elements: &PyTuple = obj.downcast()?;
|
||||
let elements = obj.downcast::<PyTuple>()?;
|
||||
assert_eq!(elements.len(), tup_tys.len());
|
||||
let val: Result<Option<Vec<_>>, _> = elements
|
||||
.iter()
|
||||
.enumerate()
|
||||
.zip(tup_tys)
|
||||
.map(|((i, elem), ty)| {
|
||||
self.get_obj_value(py, elem, ctx, generator, *ty).map_err(|e| {
|
||||
self.get_obj_value(py, &elem, ctx, generator, *ty).map_err(|e| {
|
||||
super::CompileError::new_err(format!("Error getting element {i}: {e}"))
|
||||
})
|
||||
})
|
||||
@ -1374,7 +1387,7 @@ impl InnerResolver {
|
||||
match self
|
||||
.get_obj_value(
|
||||
py,
|
||||
obj.getattr("_nac3_option").unwrap(),
|
||||
&obj.getattr("_nac3_option").unwrap(),
|
||||
ctx,
|
||||
generator,
|
||||
option_val_ty,
|
||||
@ -1398,7 +1411,9 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
self.global_value_ids
|
||||
.write()
|
||||
.insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
}
|
||||
let global = ctx.module.add_global(
|
||||
v.get_type(),
|
||||
@ -1435,7 +1450,7 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
self.global_value_ids.write().insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
}
|
||||
|
||||
let fields = {
|
||||
@ -1445,7 +1460,7 @@ impl InnerResolver {
|
||||
attributes
|
||||
.iter()
|
||||
.filter_map(|f| {
|
||||
let definition = top_level_defs.get(f.1 .0).unwrap().read();
|
||||
let definition = top_level_defs.get(f.1.0).unwrap().read();
|
||||
if let TopLevelDef::Variable { ty, .. } = &*definition {
|
||||
Some((f.0, *ty))
|
||||
} else {
|
||||
@ -1460,7 +1475,7 @@ impl InnerResolver {
|
||||
.map(|(name, ty)| {
|
||||
self.get_obj_value(
|
||||
py,
|
||||
obj.getattr(name.to_string().as_str())?,
|
||||
&obj.getattr(name.to_string().as_str())?,
|
||||
ctx,
|
||||
generator,
|
||||
*ty,
|
||||
@ -1505,7 +1520,7 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
self.global_value_ids.write().insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
}
|
||||
// should be classes
|
||||
let definition =
|
||||
@ -1517,7 +1532,7 @@ impl InnerResolver {
|
||||
.map(|(name, ty, _)| {
|
||||
self.get_obj_value(
|
||||
py,
|
||||
obj.getattr(name.to_string().as_str())?,
|
||||
&obj.getattr(name.to_string().as_str())?,
|
||||
ctx,
|
||||
generator,
|
||||
*ty,
|
||||
@ -1541,14 +1556,18 @@ impl InnerResolver {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_default_param_obj_value(
|
||||
fn get_default_param_obj_value<'py>(
|
||||
&self,
|
||||
py: Python,
|
||||
obj: &PyAny,
|
||||
py: Python<'py>,
|
||||
obj: &Bound<'py, PyAny>,
|
||||
) -> PyResult<Result<SymbolValue, String>> {
|
||||
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let ty_id: u64 =
|
||||
self.helper.id_fn.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?.extract(py)?;
|
||||
let id: u64 = self.helper.id_fn.bind(py).call1((obj,))?.extract()?;
|
||||
let ty_id: u64 = self
|
||||
.helper
|
||||
.id_fn
|
||||
.bind(py)
|
||||
.call1((self.helper.type_fn.bind(py).call1((obj,))?,))?
|
||||
.extract()?;
|
||||
Ok(if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
|
||||
let val: i32 = obj.extract()?;
|
||||
Ok(SymbolValue::I32(val))
|
||||
@ -1574,15 +1593,15 @@ impl InnerResolver {
|
||||
let val: f64 = obj.extract()?;
|
||||
Ok(SymbolValue::Double(val))
|
||||
} else if ty_id == self.primitive_ids.tuple {
|
||||
let elements: &PyTuple = obj.downcast()?;
|
||||
let elements = obj.downcast::<PyTuple>()?;
|
||||
let elements: Result<Result<Vec<_>, String>, _> =
|
||||
elements.iter().map(|elem| self.get_default_param_obj_value(py, elem)).collect();
|
||||
elements.iter().map(|elem| self.get_default_param_obj_value(py, &elem)).collect();
|
||||
elements?.map(SymbolValue::Tuple)
|
||||
} else if ty_id == self.primitive_ids.option {
|
||||
if id == self.primitive_ids.none {
|
||||
Ok(SymbolValue::OptionNone)
|
||||
} else {
|
||||
self.get_default_param_obj_value(py, obj.getattr("_nac3_option").unwrap())?
|
||||
self.get_default_param_obj_value(py, &obj.getattr("_nac3_option").unwrap())?
|
||||
.map(|v| SymbolValue::OptionSome(Box::new(v)))
|
||||
}
|
||||
} else {
|
||||
@ -1598,13 +1617,14 @@ impl SymbolResolver for Resolver {
|
||||
};
|
||||
|
||||
Python::with_gil(|py| -> PyResult<Option<SymbolValue>> {
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let members: &PyDict = obj.getattr("__dict__").unwrap().downcast().unwrap();
|
||||
let obj = self.0.module.bind(py);
|
||||
let members = obj.getattr("__dict__").unwrap();
|
||||
let members = members.downcast::<PyDict>().unwrap();
|
||||
let mut sym_value = None;
|
||||
for (key, val) in members {
|
||||
let key: &str = key.extract()?;
|
||||
if key == id.to_string() {
|
||||
if let Ok(Ok(v)) = self.0.get_default_param_obj_value(py, val) {
|
||||
if let Ok(Ok(v)) = self.0.get_default_param_obj_value(py, &val) {
|
||||
sym_value = Some(v);
|
||||
}
|
||||
break;
|
||||
@ -1638,13 +1658,14 @@ impl SymbolResolver for Resolver {
|
||||
Ok(t)
|
||||
} else {
|
||||
Python::with_gil(|py| -> PyResult<Result<Type, String>> {
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let obj = self.0.module.bind(py);
|
||||
let mut sym_ty = Err(format!("cannot find symbol `{str}`"));
|
||||
let members: &PyDict = obj.getattr("__dict__").unwrap().downcast().unwrap();
|
||||
let members = obj.getattr("__dict__").unwrap();
|
||||
let members = members.downcast::<PyDict>().unwrap();
|
||||
for (key, val) in members {
|
||||
let key: &str = key.extract()?;
|
||||
if key == str.to_string() {
|
||||
sym_ty = self.0.get_obj_type(py, val, unifier, defs, primitives)?;
|
||||
sym_ty = self.0.get_obj_type(py, &val, unifier, defs, primitives)?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -1672,7 +1693,7 @@ impl SymbolResolver for Resolver {
|
||||
if matches!(&*top_levels[def_id.0].read(), TopLevelDef::Variable { .. }) {
|
||||
let module_val = &self.0.module;
|
||||
let ret = Python::with_gil(|py| -> PyResult<Result<BasicValueEnum, String>> {
|
||||
let module_val = module_val.as_ref(py);
|
||||
let module_val = (**module_val).bind(py);
|
||||
|
||||
let ty = self.0.get_obj_type(
|
||||
py,
|
||||
@ -1713,15 +1734,16 @@ impl SymbolResolver for Resolver {
|
||||
id_to_val.get(&id).cloned()
|
||||
}
|
||||
.or_else(|| {
|
||||
Python::with_gil(|py| -> PyResult<Option<(u64, PyObject)>> {
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let mut sym_value: Option<(u64, PyObject)> = None;
|
||||
let members: &PyDict = obj.getattr("__dict__").unwrap().downcast().unwrap();
|
||||
Python::with_gil(|py| -> PyResult<Option<PyValueHandle>> {
|
||||
let obj = self.0.module.bind(py);
|
||||
let mut sym_value: Option<PyValueHandle> = None;
|
||||
let members = obj.getattr("__dict__").unwrap();
|
||||
let members = members.downcast::<PyDict>().unwrap();
|
||||
for (key, val) in members {
|
||||
let key: &str = key.extract()?;
|
||||
if key == id.to_string() {
|
||||
let id = self.0.helper.id_fn.call1(py, (val,))?.extract(py)?;
|
||||
sym_value = Some((id, val.extract()?));
|
||||
let id = self.0.helper.id_fn.bind(py).call1((&val,))?.extract()?;
|
||||
sym_value = Some((id, Arc::new(val.as_unbound().into_py_any(py)?)));
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -1733,12 +1755,14 @@ impl SymbolResolver for Resolver {
|
||||
.unwrap()
|
||||
});
|
||||
sym_value.map(|(id, v)| {
|
||||
ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: v,
|
||||
store_obj: self.0.helper.store_obj.clone(),
|
||||
resolver: self.0.clone(),
|
||||
}))
|
||||
Python::with_gil(|_| {
|
||||
ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: v,
|
||||
store_obj: self.0.helper.store_obj.clone(),
|
||||
resolver: self.0.clone(),
|
||||
}))
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@ -1786,9 +1810,9 @@ impl SymbolResolver for Resolver {
|
||||
let store = self.0.deferred_eval_store.store.read();
|
||||
Python::with_gil(|py| -> PyResult<Result<(), String>> {
|
||||
for (variables, constraints, name) in store.iter() {
|
||||
let constraints: &PyAny = constraints.as_ref(py);
|
||||
let constraints = constraints.bind(py);
|
||||
for (i, var) in variables.iter().enumerate() {
|
||||
if let Ok(constr) = constraints.get_item(i) {
|
||||
if let Ok(constr) = &constraints.get_item(i) {
|
||||
match self.0.get_pyty_obj_type(py, constr, unifier, defs, primitives)? {
|
||||
Ok((ty, _)) => {
|
||||
if !unifier.is_concrete(ty, &[]) {
|
||||
@ -1,6 +1,6 @@
use nac3core::{
codegen::{expr::infer_and_call_function, CodeGenContext},
inkwell::{values::BasicValueEnum, AddressSpace, AtomicOrdering},
codegen::{CodeGenContext, expr::infer_and_call_function},
inkwell::{AddressSpace, AtomicOrdering, values::BasicValueEnum},
};

/// Functions for manipulating the timeline.

@ -2,7 +2,7 @@
name = "nac3ast"
version = "0.1.0"
authors = ["RustPython Team", "M-Labs"]
edition = "2021"
edition = "2024"

[features]
default = ["constant-optimization", "fold"]
@ -11,5 +11,5 @@ fold = []

[dependencies]
parking_lot = "0.12"
string-interner = "0.18"
string-interner = "0.19"
fxhash = "0.2"

@ -6,7 +6,7 @@ pub use crate::location::Location;
use fxhash::FxBuildHasher;
use parking_lot::{Mutex, MutexGuard};
use std::{cell::RefCell, collections::HashMap, fmt, sync::LazyLock};
use string_interner::{symbol::SymbolU32, DefaultBackend, StringInterner};
use string_interner::{DefaultBackend, StringInterner, symbol::SymbolU32};

pub type Interner = StringInterner<DefaultBackend, FxBuildHasher>;
static INTERNER: LazyLock<Mutex<Interner>> =

@ -1,6 +1,6 @@
use crate::StrRef;
use crate::constant;
use crate::fold::Fold;
use crate::StrRef;

pub(crate) trait Foldable<T, U> {
type Mapped;

@ -2,7 +2,7 @@
name = "nac3core"
version = "0.1.0"
authors = ["M-Labs"]
edition = "2021"
edition = "2024"

[features]
default = ["derive"]
@ -12,7 +12,7 @@ no-escape-analysis = []
[dependencies]
itertools = "0.14"
crossbeam = "0.8"
indexmap = "2.7"
indexmap = "2.8"
parking_lot = "0.12"
nac3core_derive = { path = "nac3core_derive", optional = true }
nac3parser = { path = "../nac3parser" }

@ -1,7 +1,7 @@
[package]
name = "nac3core_derive"
version = "0.1.0"
edition = "2021"
edition = "2024"

[lib]
proc-macro = true

@ -2,8 +2,8 @@ use proc_macro::TokenStream;
|
||||
use proc_macro_error::{abort, proc_macro_error};
|
||||
use quote::quote;
|
||||
use syn::{
|
||||
parse_macro_input, spanned::Spanned, Data, DataStruct, Expr, ExprField, ExprMethodCall,
|
||||
ExprPath, GenericArgument, Ident, LitStr, Path, PathArguments, Type, TypePath,
|
||||
Data, DataStruct, Expr, ExprField, ExprMethodCall, ExprPath, GenericArgument, Ident, LitStr,
|
||||
Path, PathArguments, Type, TypePath, parse_macro_input, spanned::Spanned,
|
||||
};
|
||||
|
||||
/// Extracts all generic arguments of a [`Type`] into a [`Vec`].
|
||||
@ -59,11 +59,7 @@ fn replace_top_level_receiver(expr: &mut Expr, ident: Ident) -> Option<&mut Expr
|
||||
| Expr::Field(ExprField { base: operand, .. }) = expr
|
||||
{
|
||||
return if extract_dot_operand(operand).is_some() {
|
||||
if replace_top_level_receiver(operand, ident).is_some() {
|
||||
Some(expr)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
if replace_top_level_receiver(operand, ident).is_some() { Some(expr) } else { None }
|
||||
} else {
|
||||
*operand = Box::new(Expr::Path(ExprPath {
|
||||
attrs: Vec::default(),
|
||||
@ -105,7 +101,7 @@ fn normalize_value_expr(expr: &Expr) -> proc_macro2::TokenStream {
|
||||
abort!(
|
||||
path,
|
||||
format!(
|
||||
"Expected one of `size_t`, `usize`, or an implicit call expression in #[value_type(...)], found {}",
|
||||
"Expected one of `size_t`, `usize`, or an implicit call expression in #[value_type(...)], found {}",
|
||||
quote!(#expr).to_string(),
|
||||
)
|
||||
)
|
||||
@ -154,7 +150,7 @@ fn normalize_value_expr(expr: &Expr) -> proc_macro2::TokenStream {
|
||||
abort!(
|
||||
expr,
|
||||
format!(
|
||||
"Expected one of `size_t`, `usize`, or an implicit call expression in #[value_type(...)], found {}",
|
||||
"Expected one of `size_t`, `usize`, or an implicit call expression in #[value_type(...)], found {}",
|
||||
quote!(#expr).to_string(),
|
||||
)
|
||||
)
|
||||
@ -224,10 +220,9 @@ pub fn derive(input: TokenStream) -> TokenStream {
|
||||
let Data::Struct(DataStruct { fields, .. }) = &input.data else {
|
||||
abort!(input, "Only structs with named fields are supported");
|
||||
};
|
||||
if let Err(err_span) =
|
||||
fields
|
||||
.iter()
|
||||
.try_for_each(|field| if field.ident.is_some() { Ok(()) } else { Err(field.span()) })
|
||||
if let Err(err_span) = fields
|
||||
.iter()
|
||||
.try_for_each(|field| if field.ident.is_some() { Ok(()) } else { Err(field.span()) })
|
||||
{
|
||||
abort!(err_span, "Only structs with named fields are supported");
|
||||
};
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
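The import reshuffling in these hunks (`AddressSpace` now ahead of `values::{…}`, `CodeGenContext, CodeGenerator` ahead of `expr::…`) looks consistent with reformatting under the Rust 2024 style edition, whose import sorting puts capitalized names before lowercase module paths; that is an inference from the `edition = "2024"` bumps above, not from any rustfmt config visible here. A tiny self-contained illustration of the ASCII ordering (all names invented):

    mod demo {
        pub mod expr {
            pub fn infer_and_call_function() {}
        }
        pub struct CodeGenContext;
        pub struct CodeGenerator;
    }

    // The older style commonly grouped this as
    // `use demo::{expr::infer_and_call_function, CodeGenContext, CodeGenerator};`
    // ASCII order puts 'C' before 'e', hence:
    use demo::{CodeGenContext, CodeGenerator, expr::infer_and_call_function};

    fn main() {
        let (_ctx, _generator) = (CodeGenContext, CodeGenerator);
        infer_and_call_function();
    }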
|
@ -1,26 +1,26 @@
|
||||
use inkwell::{
|
||||
FloatPredicate, IntPredicate, OptimizationLevel,
|
||||
types::BasicTypeEnum,
|
||||
values::{BasicValueEnum, IntValue},
|
||||
FloatPredicate, IntPredicate, OptimizationLevel,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use super::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::destructure_range,
|
||||
extern_fns, irrt,
|
||||
irrt::calculate_len_for_slice_range,
|
||||
llvm_intrinsics,
|
||||
macros::codegen_unreachable,
|
||||
types::{ndarray::NDArrayType, ListType, RangeType, TupleType},
|
||||
types::{ListType, RangeType, TupleType, ndarray::NDArrayType},
|
||||
values::{
|
||||
ndarray::{NDArrayOut, NDArrayValue, ScalarOrNDArray},
|
||||
ProxyValue, TypedArrayLikeAccessor, UntypedArrayLikeAccessor,
|
||||
ndarray::{NDArrayOut, NDArrayValue, ScalarOrNDArray},
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
use crate::{
|
||||
toplevel::{
|
||||
helper::{arraylike_flatten_element_type, extract_ndims, PrimDef},
|
||||
helper::{PrimDef, arraylike_flatten_element_type, extract_ndims},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
@ -99,17 +99,21 @@ pub fn call_int32<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 32 => {
|
||||
debug_assert!([ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
debug_assert!(
|
||||
[ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
|
||||
n.into()
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 64 => {
|
||||
debug_assert!([ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
debug_assert!(
|
||||
[ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
|
||||
ctx.builder.build_int_truncate(n, llvm_i32, "trunc").map(Into::into).unwrap()
|
||||
}
|
||||
@ -155,9 +159,11 @@ pub fn call_int64<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match n {
|
||||
BasicValueEnum::IntValue(n) if matches!(n.get_type().get_bit_width(), 1 | 8 | 32) => {
|
||||
debug_assert!([ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
debug_assert!(
|
||||
[ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
|
||||
if ctx.unifier.unioned(n_ty, ctx.primitives.int32) {
|
||||
ctx.builder.build_int_s_extend(n, llvm_i64, "sext").map(Into::into).unwrap()
|
||||
@ -167,9 +173,11 @@ pub fn call_int64<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 64 => {
|
||||
debug_assert!([ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
debug_assert!(
|
||||
[ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
|
||||
n.into()
|
||||
}
|
||||
@ -222,9 +230,11 @@ pub fn call_uint32<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 32 => {
|
||||
debug_assert!([ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
debug_assert!(
|
||||
[ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
|
||||
n.into()
|
||||
}
|
||||
@ -293,9 +303,11 @@ pub fn call_uint64<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match n {
|
||||
BasicValueEnum::IntValue(n) if matches!(n.get_type().get_bit_width(), 1 | 8 | 32) => {
|
||||
debug_assert!([ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
debug_assert!(
|
||||
[ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
|
||||
if ctx.unifier.unioned(n_ty, ctx.primitives.int32) {
|
||||
ctx.builder.build_int_s_extend(n, llvm_i64, "sext").map(Into::into).unwrap()
|
||||
@ -305,9 +317,11 @@ pub fn call_uint64<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 64 => {
|
||||
debug_assert!([ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
debug_assert!(
|
||||
[ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
|
||||
n.into()
|
||||
}
|
||||
@ -359,15 +373,17 @@ pub fn call_float<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match n {
|
||||
BasicValueEnum::IntValue(n) if matches!(n.get_type().get_bit_width(), 1 | 8 | 32 | 64) => {
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
|
||||
if [ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.int64]
|
||||
.iter()
|
||||
@ -515,14 +531,16 @@ pub fn call_bool<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) => {
|
||||
debug_assert!([
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
|
||||
ctx.builder
|
||||
.build_int_compare(IntPredicate::NE, n, n.get_type().const_zero(), FN_NAME)
|
||||
@ -683,15 +701,17 @@ pub fn call_min<'ctx>(
|
||||
|
||||
match (m, n) {
|
||||
(BasicValueEnum::IntValue(m), BasicValueEnum::IntValue(n)) => {
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty, *ty)));
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty, *ty))
|
||||
);
|
||||
|
||||
if [ctx.primitives.int32, ctx.primitives.int64]
|
||||
.iter()
|
||||
@ -726,16 +746,18 @@ pub fn call_numpy_minimum<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match (x1, x2) {
|
||||
(BasicValueEnum::IntValue(x1), BasicValueEnum::IntValue(x2)) => {
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty.unwrap(), *ty)));
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty.unwrap(), *ty))
|
||||
);
|
||||
|
||||
call_min(ctx, (x1_ty, x1.into()), (x2_ty, x2.into()))
|
||||
}
|
||||
@ -800,15 +822,17 @@ pub fn call_max<'ctx>(
|
||||
|
||||
match (m, n) {
|
||||
(BasicValueEnum::IntValue(m), BasicValueEnum::IntValue(n)) => {
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty, *ty)));
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty, *ty))
|
||||
);
|
||||
|
||||
if [ctx.primitives.int32, ctx.primitives.int64]
|
||||
.iter()
|
||||
@ -845,16 +869,18 @@ pub fn call_numpy_max_min<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match a {
|
||||
BasicValueEnum::IntValue(_) | BasicValueEnum::FloatValue(_) => {
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(a_ty, *ty)));
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(a_ty, *ty))
|
||||
);
|
||||
|
||||
match fn_name {
|
||||
"np_argmin" | "np_argmax" => llvm_int64.const_zero().into(),
|
||||
@ -986,16 +1012,18 @@ pub fn call_numpy_maximum<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match (x1, x2) {
|
||||
(BasicValueEnum::IntValue(x1), BasicValueEnum::IntValue(x2)) => {
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty.unwrap(), *ty)));
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty.unwrap(), *ty))
|
||||
);
|
||||
|
||||
call_max(ctx, (x1_ty, x1.into()), (x2_ty, x2.into()))
|
||||
}
|
||||
@ -1101,15 +1129,17 @@ pub fn call_abs<'ctx, G: CodeGenerator + ?Sized>(
|
||||
&|_ctx, elem_ty| elem_ty,
|
||||
&|_generator, ctx, val_ty, val| match val {
|
||||
BasicValueEnum::IntValue(n) => Some({
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(val_ty, *ty)));
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(val_ty, *ty))
|
||||
);
|
||||
|
||||
if [ctx.primitives.int32, ctx.primitives.int64]
|
||||
.iter()
|
||||
|
@ -10,7 +10,7 @@ use crate::{
|
||||
typecheck::{
|
||||
type_inferencer::PrimitiveStore,
|
||||
typedef::{
|
||||
into_var_map, FunSignature, FuncArg, Type, TypeEnum, TypeVar, TypeVarId, Unifier,
|
||||
FunSignature, FuncArg, Type, TypeEnum, TypeVar, TypeVarId, Unifier, into_var_map,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
@ -6,12 +6,12 @@ use std::{
|
||||
};
|
||||
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
attributes::{Attribute, AttributeLoc},
|
||||
types::{AnyType, BasicType, BasicTypeEnum},
|
||||
values::{BasicValueEnum, CallSiteValue, FunctionValue, IntValue, PointerValue, StructValue},
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
};
|
||||
use itertools::{izip, Either, Itertools};
|
||||
use itertools::{Either, Itertools, izip};
|
||||
|
||||
use nac3parser::ast::{
|
||||
self, Boolop, Cmpop, Comprehension, Constant, Expr, ExprKind, Location, Operator, StrRef,
|
||||
@ -19,6 +19,7 @@ use nac3parser::ast::{
|
||||
};
|
||||
|
||||
use super::{
|
||||
CodeGenContext, CodeGenTask, CodeGenerator,
|
||||
concrete_type::{ConcreteFuncArg, ConcreteTypeEnum, ConcreteTypeStore},
|
||||
gen_in_range_check, get_llvm_abi_type, get_llvm_type, get_va_count_arg_name,
|
||||
irrt::*,
|
||||
@ -33,21 +34,20 @@ use super::{
|
||||
gen_var,
|
||||
},
|
||||
types::{
|
||||
ndarray::NDArrayType, ExceptionType, ListType, OptionType, RangeType, StringType, TupleType,
|
||||
ExceptionType, ListType, OptionType, RangeType, StringType, TupleType, ndarray::NDArrayType,
|
||||
},
|
||||
values::{
|
||||
ndarray::{NDArrayOut, RustNDIndex, ScalarOrNDArray},
|
||||
ArrayLikeIndexer, ArrayLikeValue, ListValue, ProxyValue, RangeValue,
|
||||
UntypedArrayLikeAccessor,
|
||||
ndarray::{NDArrayOut, RustNDIndex, ScalarOrNDArray},
|
||||
},
|
||||
CodeGenContext, CodeGenTask, CodeGenerator,
|
||||
};
|
||||
use crate::{
|
||||
symbol_resolver::{SymbolValue, ValueEnum},
|
||||
toplevel::{
|
||||
helper::{arraylike_flatten_element_type, extract_ndims, PrimDef},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
DefinitionId, TopLevelDef,
|
||||
helper::{PrimDef, arraylike_flatten_element_type, extract_ndims},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
},
|
||||
typecheck::{
|
||||
magic_methods::{Binop, BinopVariant, HasOpInfo},
|
||||
@ -73,7 +73,7 @@ pub fn get_subst_key(
|
||||
})
|
||||
.unwrap_or_default();
|
||||
vars.extend(fun_vars);
|
||||
let sorted = vars.keys().filter(|id| filter.map_or(true, |v| v.contains(id))).sorted();
|
||||
let sorted = vars.keys().filter(|id| filter.is_none_or(|v| v.contains(id))).sorted();
|
||||
sorted
|
||||
.map(|id| {
|
||||
unifier.internal_stringify(
|
||||
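`filter.map_or(true, |v| v.contains(id))` and `filter.is_none_or(|v| v.contains(id))` are equivalent; the new spelling uses the now-stable `Option::is_none_or` and reads as "no filter, or the filter matches". A small self-contained check (names invented):

    fn filter_allows(filter: Option<&[u32]>, id: u32) -> bool {
        // Equivalent to the previous `filter.map_or(true, |v| v.contains(&id))`:
        // `None` means "no filter", so everything passes.
        filter.is_none_or(|v| v.contains(&id))
    }

    fn main() {
        assert!(filter_allows(None, 5));
        assert!(filter_allows(Some(&[1, 5, 9]), 5));
        assert!(!filter_allows(Some(&[1, 9]), 5));
    }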
@ -137,7 +137,7 @@ impl<'ctx> CodeGenContext<'ctx, '_> {
|
||||
(field_index.0, None)
|
||||
} else {
|
||||
let attribute_index = attributes.iter().find_position(|x| x.0 == attr).unwrap();
|
||||
(attribute_index.0, Some(attribute_index.1 .2.clone()))
|
||||
(attribute_index.0, Some(attribute_index.1.2.clone()))
|
||||
}
|
||||
} else if let TopLevelDef::Module { attributes, .. } = &*def.read() {
|
||||
(attributes.iter().find_position(|x| x.0 == attr).unwrap().0, None)
|
||||
@ -782,11 +782,10 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
) -> Result<Option<BasicValueEnum<'ctx>>, String> {
|
||||
let llvm_usize = ctx.get_size_type();
|
||||
|
||||
let definition = ctx.top_level.definitions.read().get(fun.1 .0).cloned().unwrap();
|
||||
let definition = ctx.top_level.definitions.read().get(fun.1.0).cloned().unwrap();
|
||||
let id;
|
||||
let key;
|
||||
let param_vals;
|
||||
let is_extern;
|
||||
let vararg_arg;
|
||||
|
||||
// Ensure that the function object only contains up to 1 vararg parameter
|
||||
@ -805,7 +804,6 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
if let Some(callback) = codegen_callback {
|
||||
return callback.run(ctx, obj, fun, params, generator);
|
||||
}
|
||||
is_extern = instance_to_stmt.is_empty();
|
||||
vararg_arg = fun.0.args.iter().find(|arg| arg.is_vararg);
|
||||
let old_key = ctx.get_subst_key(obj.as_ref().map(|a| a.0), fun.0, None);
|
||||
let mut keys = fun.0.args.clone();
|
||||
@ -865,9 +863,10 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
} else {
|
||||
mapping.insert(
|
||||
k.name,
|
||||
vec![ctx
|
||||
.gen_symbol_val(generator, &k.default_value.unwrap(), k.ty)
|
||||
.into()],
|
||||
vec![
|
||||
ctx.gen_symbol_val(generator, &k.default_value.unwrap(), k.ty)
|
||||
.into(),
|
||||
],
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -937,7 +936,7 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
instance_to_symbol.get(&key).cloned().ok_or_else(String::new)
|
||||
}
|
||||
TopLevelDef::Class { .. } => {
|
||||
return Ok(Some(generator.gen_constructor(ctx, fun.0, &def, params)?))
|
||||
return Ok(Some(generator.gen_constructor(ctx, fun.0, &def, params)?));
|
||||
}
|
||||
TopLevelDef::Variable { .. } | TopLevelDef::Module { .. } => unreachable!(),
|
||||
}
|
||||
@ -959,21 +958,10 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
Some(ctx.get_llvm_abi_type(generator, fun.0.ret))
|
||||
};
|
||||
let has_sret = ret_type.is_some_and(|ret_type| need_sret(ret_type));
|
||||
let mut byrefs = Vec::new();
|
||||
let mut params = args
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(|(_, arg)| !arg.is_vararg)
|
||||
.map(|(i, arg)| {
|
||||
match ctx.get_llvm_abi_type(generator, arg.ty) {
|
||||
BasicTypeEnum::StructType(ty) if is_extern => {
|
||||
byrefs.push((i, ty));
|
||||
ty.ptr_type(AddressSpace::default()).into()
|
||||
}
|
||||
x => x,
|
||||
}
|
||||
.into()
|
||||
})
|
||||
.filter(|arg| !arg.is_vararg)
|
||||
.map(|arg| ctx.get_llvm_abi_type(generator, arg.ty).into())
|
||||
.collect_vec();
|
||||
if has_sret {
|
||||
params.insert(0, ret_type.unwrap().ptr_type(AddressSpace::default()).into());
|
||||
@ -987,7 +975,7 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
_ => ctx.ctx.void_type().fn_type(¶ms, is_vararg),
|
||||
};
|
||||
let fun_val = ctx.module.add_function(&symbol, fun_ty, None);
|
||||
let offset = if has_sret {
|
||||
if has_sret {
|
||||
fun_val.add_attribute(
|
||||
AttributeLoc::Param(0),
|
||||
ctx.ctx.create_type_attribute(
|
||||
@ -995,23 +983,8 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
ret_type.unwrap().as_any_type_enum(),
|
||||
),
|
||||
);
|
||||
1
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
// The attribute ID used to mark arguments of a structure type.
|
||||
// Structure-Typed parameters of extern functions must **not** be marked as `byval`, as
|
||||
// `byval` explicitly specifies that the argument is to be passed on the stack, which breaks
|
||||
// on most ABIs where the first several arguments are expected to be passed in registers.
|
||||
let passing_attr_id =
|
||||
Attribute::get_named_enum_kind_id(if is_extern { "byref" } else { "byval" });
|
||||
for (i, ty) in byrefs {
|
||||
fun_val.add_attribute(
|
||||
AttributeLoc::Param((i as u32) + offset),
|
||||
ctx.ctx.create_type_attribute(passing_attr_id, ty.as_any_type_enum()),
|
||||
);
|
||||
}
|
||||
|
||||
fun_val
|
||||
});
|
||||
|
||||
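These hunks drop the byref bookkeeping for struct-typed arguments of extern functions while keeping the sret handling for struct returns; the deleted comment carried the reasoning (`byval` pins the argument to the stack, which breaks ABIs that pass leading arguments in registers, so externs used `byref`). A hedged sketch of how such attributes are attached with inkwell, reusing only the calls that appear in this diff (the helper and its exact wiring are mine, not the repository's):

    use inkwell::attributes::{Attribute, AttributeLoc};
    use inkwell::context::Context;
    use inkwell::types::{AnyType, StructType};
    use inkwell::values::FunctionValue;

    // Hypothetical helper: tag struct-typed parameters as `byref` (extern) or `byval`.
    fn mark_struct_params<'ctx>(
        ctx: &'ctx Context,
        fun_val: FunctionValue<'ctx>,
        byrefs: &[(u32, StructType<'ctx>)],
        is_extern: bool,
        offset: u32, // 1 when parameter 0 is the sret out-pointer, 0 otherwise
    ) {
        let kind = Attribute::get_named_enum_kind_id(if is_extern { "byref" } else { "byval" });
        for &(i, ty) in byrefs {
            fun_val.add_attribute(
                AttributeLoc::Param(i + offset),
                ctx.create_type_attribute(kind, ty.as_any_type_enum()),
            );
        }
    }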
@ -1573,7 +1546,6 @@ pub fn gen_binop_expr_with_values<'ctx, G: CodeGenerator>(
|
||||
vec![(None, right_val.into())],
|
||||
)
|
||||
.map(Option::unwrap)
|
||||
.map(BasicValueEnum::into)
|
||||
}
|
||||
}
|
||||
|
||||
@ -2591,7 +2563,7 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
|
||||
}
|
||||
ExprKind::UnaryOp { op, operand } => return gen_unaryop_expr(generator, ctx, *op, operand),
|
||||
ExprKind::Compare { left, ops, comparators } => {
|
||||
return gen_cmpop_expr(generator, ctx, left, ops, comparators)
|
||||
return gen_cmpop_expr(generator, ctx, left, ops, comparators);
|
||||
}
|
||||
ExprKind::IfExp { test, body, orelse } => {
|
||||
let test = match generator.gen_expr(ctx, test)? {
|
||||
@ -3035,9 +3007,8 @@ pub fn create_and_call_function<'ctx>(
|
||||
value_name: Option<&str>,
|
||||
configure: Option<&dyn Fn(&FunctionValue<'ctx>)>,
|
||||
) -> Option<BasicValueEnum<'ctx>> {
|
||||
let param_tys = params.iter().map(|(ty, _)| ty).copied().map(BasicTypeEnum::into).collect_vec();
|
||||
let arg_values =
|
||||
params.iter().map(|(_, value)| value).copied().map(BasicValueEnum::into).collect_vec();
|
||||
let param_tys = params.iter().map(|(ty, _)| ty).copied().collect_vec();
|
||||
let arg_values = params.iter().map(|(_, value)| value).copied().collect_vec();
|
||||
|
||||
create_fn_and_call(
|
||||
ctx,
|
||||
|
@ -3,7 +3,7 @@ use inkwell::{
|
||||
values::{BasicValueEnum, FloatValue},
|
||||
};
|
||||
|
||||
use super::{expr::infer_and_call_function, CodeGenContext};
|
||||
use super::{CodeGenContext, expr::infer_and_call_function};
|
||||
|
||||
/// Macro to generate extern function
|
||||
/// Both function return type and function parameter type are `FloatValue`
|
||||
|
@ -7,7 +7,7 @@ use inkwell::{
|
||||
|
||||
use nac3parser::ast::{Expr, Stmt, StrRef};
|
||||
|
||||
use super::{bool_to_int_type, expr::*, stmt::*, values::ArraySliceValue, CodeGenContext};
|
||||
use super::{CodeGenContext, bool_to_int_type, expr::*, stmt::*, values::ArraySliceValue};
|
||||
use crate::{
|
||||
symbol_resolver::ValueEnum,
|
||||
toplevel::{DefinitionId, TopLevelDef},
|
||||
@ -308,10 +308,6 @@ impl CodeGenerator for DefaultCodeGenerator {
|
||||
fn get_size_type<'ctx>(&self, ctx: &'ctx Context) -> IntType<'ctx> {
|
||||
// it should be unsigned, but we don't really need unsigned and this could save us from
|
||||
// having to do a bit cast...
|
||||
if self.size_t == 32 {
|
||||
ctx.i32_type()
|
||||
} else {
|
||||
ctx.i64_type()
|
||||
}
|
||||
if self.size_t == 32 { ctx.i32_type() } else { ctx.i64_type() }
|
||||
}
|
||||
}
|
||||
|
@ -3,7 +3,7 @@ use inkwell::{
|
||||
values::{BasicValueEnum, FloatValue, IntValue},
|
||||
};
|
||||
|
||||
use crate::codegen::{expr::infer_and_call_function, CodeGenContext};
|
||||
use crate::codegen::{CodeGenContext, expr::infer_and_call_function};
|
||||
|
||||
/// Generates a call to [`isinf`](https://en.cppreference.com/w/c/numeric/math/isinf) in IR. Returns
|
||||
/// an `i1` representing the result.
|
||||
|
@ -1,16 +1,16 @@
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate,
|
||||
types::BasicTypeEnum,
|
||||
values::{BasicValueEnum, IntValue},
|
||||
AddressSpace, IntPredicate,
|
||||
};
|
||||
|
||||
use super::calculate_len_for_slice_range;
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
macros::codegen_unreachable,
|
||||
stmt::gen_if_callback,
|
||||
values::{ArrayLikeValue, ListValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// This function handles 'end' **inclusively**.
|
||||
|
@ -1,6 +1,6 @@
|
||||
use inkwell::{
|
||||
values::{BasicValueEnum, FloatValue, IntValue},
|
||||
IntPredicate,
|
||||
values::{BasicValueEnum, FloatValue, IntValue},
|
||||
};
|
||||
|
||||
use crate::codegen::{
|
||||
|
@ -1,10 +1,10 @@
|
||||
use inkwell::{
|
||||
IntPredicate,
|
||||
attributes::{Attribute, AttributeLoc},
|
||||
context::Context,
|
||||
memory_buffer::MemoryBuffer,
|
||||
module::Module,
|
||||
values::{BasicValue, BasicValueEnum, IntValue},
|
||||
IntPredicate,
|
||||
};
|
||||
|
||||
use nac3parser::ast::Expr;
|
||||
|
@ -1,10 +1,10 @@
|
||||
use inkwell::{types::BasicTypeEnum, values::IntValue};
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
values::{ndarray::NDArrayValue, ListValue, ProxyValue, TypedArrayLikeAccessor},
|
||||
CodeGenContext, CodeGenerator,
|
||||
values::{ListValue, ProxyValue, TypedArrayLikeAccessor, ndarray::NDArrayValue},
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_array_set_and_validate_list_shape`.
|
||||
|
@ -1,13 +1,13 @@
|
||||
use inkwell::{
|
||||
values::{BasicValueEnum, IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
values::{BasicValueEnum, IntValue, PointerValue},
|
||||
};
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
values::{ndarray::NDArrayValue, ProxyValue, TypedArrayLikeAccessor},
|
||||
CodeGenContext, CodeGenerator,
|
||||
values::{ProxyValue, TypedArrayLikeAccessor, ndarray::NDArrayValue},
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_util_assert_shape_no_negative`.
|
||||
|
@ -1,14 +1,14 @@
|
||||
use inkwell::values::IntValue;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
types::{ndarray::ShapeEntryType, ProxyType},
|
||||
types::{ProxyType, ndarray::ShapeEntryType},
|
||||
values::{
|
||||
ndarray::NDArrayValue, ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAccessor,
|
||||
TypedArrayLikeMutator,
|
||||
ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAccessor, TypedArrayLikeMutator,
|
||||
ndarray::NDArrayValue,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_broadcast_to`.
|
||||
@@ -55,11 +55,13 @@ pub fn call_nac3_ndarray_broadcast_shapes<'ctx, G, Shape>(
let llvm_usize = ctx.get_size_type();

assert_eq!(num_shape_entries.get_type(), llvm_usize);
assert!(ShapeEntryType::is_representable(
shape_entries.base_ptr(ctx, generator).get_type(),
llvm_usize,
)
.is_ok());
assert!(
ShapeEntryType::is_representable(
shape_entries.base_ptr(ctx, generator).get_type(),
llvm_usize,
)
.is_ok()
);
assert_eq!(dst_ndims.get_type(), llvm_usize);
assert_eq!(dst_shape.element_type(ctx, generator), llvm_usize.into());

@@ -1,8 +1,8 @@
use crate::codegen::{
CodeGenContext, CodeGenerator,
expr::infer_and_call_function,
irrt::get_usize_dependent_function_name,
values::{ndarray::NDArrayValue, ArrayLikeValue, ArraySliceValue, ProxyValue},
CodeGenContext, CodeGenerator,
values::{ArrayLikeValue, ArraySliceValue, ProxyValue, ndarray::NDArrayValue},
};

/// Generates a call to `__nac3_ndarray_index`.
@@ -1,13 +1,13 @@
use inkwell::values::{BasicValueEnum, IntValue};

use crate::codegen::{
CodeGenContext, CodeGenerator,
expr::infer_and_call_function,
irrt::get_usize_dependent_function_name,
values::{
ndarray::{NDArrayValue, NDIterValue},
ProxyValue, TypedArrayLikeAccessor,
ndarray::{NDArrayValue, NDIterValue},
},
CodeGenContext, CodeGenerator,
};

/// Generates a call to `__nac3_nditer_initialize`.
@@ -1,8 +1,8 @@
use inkwell::values::IntValue;

use crate::codegen::{
expr::infer_and_call_function, irrt::get_usize_dependent_function_name,
values::TypedArrayLikeAccessor, CodeGenContext, CodeGenerator,
CodeGenContext, CodeGenerator, expr::infer_and_call_function,
irrt::get_usize_dependent_function_name, values::TypedArrayLikeAccessor,
};

/// Generates a call to `__nac3_ndarray_matmul_calculate_shapes`.
@@ -1,10 +1,10 @@
use inkwell::values::IntValue;

use crate::codegen::{
CodeGenContext, CodeGenerator,
expr::infer_and_call_function,
irrt::get_usize_dependent_function_name,
values::{ArrayLikeValue, ArraySliceValue},
CodeGenContext, CodeGenerator,
};

/// Generates a call to `__nac3_ndarray_reshape_resolve_and_check_new_shape`.
@@ -1,10 +1,10 @@
use inkwell::{values::IntValue, AddressSpace};
use inkwell::{AddressSpace, values::IntValue};

use crate::codegen::{
CodeGenContext, CodeGenerator,
expr::infer_and_call_function,
irrt::get_usize_dependent_function_name,
values::{ndarray::NDArrayValue, ProxyValue, TypedArrayLikeAccessor},
CodeGenContext, CodeGenerator,
values::{ProxyValue, TypedArrayLikeAccessor, ndarray::NDArrayValue},
};

/// Generates a call to `__nac3_ndarray_transpose`.
@@ -1,9 +1,9 @@
use inkwell::{
values::{BasicValueEnum, IntValue},
IntPredicate,
values::{BasicValueEnum, IntValue},
};

use crate::codegen::{expr::infer_and_call_function, CodeGenContext, CodeGenerator};
use crate::codegen::{CodeGenContext, CodeGenerator, expr::infer_and_call_function};

/// Invokes the `__nac3_range_slice_len` in IRRT.
///
@@ -3,7 +3,7 @@ use inkwell::values::{BasicValueEnum, IntValue};
use nac3parser::ast::Expr;

use crate::{
codegen::{expr::infer_and_call_function, CodeGenContext, CodeGenerator},
codegen::{CodeGenContext, CodeGenerator, expr::infer_and_call_function},
typecheck::typedef::Type,
};

@@ -1,7 +1,7 @@
use inkwell::values::{BasicValueEnum, IntValue};

use super::get_usize_dependent_function_name;
use crate::codegen::{expr::infer_and_call_function, values::StringValue, CodeGenContext};
use crate::codegen::{CodeGenContext, expr::infer_and_call_function, values::StringValue};

/// Generates a call to string equality comparison. Returns an `i1` representing whether the strings are equal.
pub fn call_string_eq<'ctx>(
@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
intrinsics::Intrinsic,
|
||||
types::AnyTypeEnum::IntType,
|
||||
values::{BasicValueEnum, CallSiteValue, FloatValue, IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Either;
|
||||
|
||||
|
@ -2,14 +2,15 @@ use std::{
|
||||
cell::OnceCell,
|
||||
collections::{HashMap, HashSet},
|
||||
sync::{
|
||||
atomic::{AtomicBool, Ordering},
|
||||
Arc,
|
||||
atomic::{AtomicBool, Ordering},
|
||||
},
|
||||
thread,
|
||||
};
|
||||
|
||||
use crossbeam::channel::{unbounded, Receiver, Sender};
|
||||
use crossbeam::channel::{Receiver, Sender, unbounded};
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
attributes::{Attribute, AttributeLoc},
|
||||
basic_block::BasicBlock,
|
||||
builder::Builder,
|
||||
@ -22,7 +23,6 @@ use inkwell::{
|
||||
targets::{CodeModel, RelocMode, Target, TargetMachine, TargetTriple},
|
||||
types::{AnyType, BasicType, BasicTypeEnum, IntType},
|
||||
values::{BasicValueEnum, FunctionValue, IntValue, PhiValue, PointerValue},
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use parking_lot::{Condvar, Mutex};
|
||||
@ -32,9 +32,9 @@ use nac3parser::ast::{Location, Stmt, StrRef};
|
||||
use crate::{
|
||||
symbol_resolver::{StaticValue, SymbolResolver},
|
||||
toplevel::{
|
||||
helper::{extract_ndims, PrimDef},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
TopLevelContext, TopLevelDef,
|
||||
helper::{PrimDef, extract_ndims},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
},
|
||||
typecheck::{
|
||||
type_inferencer::{CodeLocation, PrimitiveStore},
|
||||
@ -44,8 +44,8 @@ use crate::{
|
||||
use concrete_type::{ConcreteType, ConcreteTypeEnum, ConcreteTypeStore};
|
||||
pub use generator::{CodeGenerator, DefaultCodeGenerator};
|
||||
use types::{
|
||||
ndarray::NDArrayType, ExceptionType, ListType, OptionType, ProxyType, RangeType, StringType,
|
||||
TupleType,
|
||||
ExceptionType, ListType, OptionType, ProxyType, RangeType, StringType, TupleType,
|
||||
ndarray::NDArrayType,
|
||||
};
|
||||
|
||||
pub mod builtin_fns;
|
||||
@ -1028,8 +1028,7 @@ pub fn gen_func_impl<
|
||||
);
|
||||
let generator_llvm_usize = generator.get_size_type(context);
|
||||
assert_eq!(
|
||||
generator_llvm_usize,
|
||||
target_llvm_usize,
|
||||
generator_llvm_usize, target_llvm_usize,
|
||||
"CodeGenerator (size_t = {generator_llvm_usize}) is not compatible with CodeGen Target (size_t = {target_llvm_usize})",
|
||||
);
|
||||
|
||||
|
@ -1,23 +1,23 @@
|
||||
use inkwell::{
|
||||
values::{BasicValue, BasicValueEnum, PointerValue},
|
||||
IntPredicate,
|
||||
values::{BasicValue, BasicValueEnum, PointerValue},
|
||||
};
|
||||
|
||||
use nac3parser::ast::StrRef;
|
||||
|
||||
use super::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
macros::codegen_unreachable,
|
||||
stmt::gen_for_callback,
|
||||
types::ndarray::{NDArrayType, NDIterType},
|
||||
values::{ndarray::shape::parse_numpy_int_sequence, ProxyValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
values::{ProxyValue, ndarray::shape::parse_numpy_int_sequence},
|
||||
};
|
||||
use crate::{
|
||||
symbol_resolver::ValueEnum,
|
||||
toplevel::{
|
||||
DefinitionId,
|
||||
helper::{arraylike_flatten_element_type, extract_ndims},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
DefinitionId,
|
||||
},
|
||||
typecheck::typedef::{FunSignature, Type},
|
||||
};
|
||||
|
@ -1,35 +1,35 @@
|
||||
use inkwell::{
|
||||
IntPredicate,
|
||||
attributes::{Attribute, AttributeLoc},
|
||||
basic_block::BasicBlock,
|
||||
builder::Builder,
|
||||
types::{BasicType, BasicTypeEnum},
|
||||
values::{BasicValue, BasicValueEnum, FunctionValue, IntValue, PointerValue},
|
||||
IntPredicate,
|
||||
};
|
||||
use itertools::{izip, Itertools};
|
||||
use itertools::{Itertools, izip};
|
||||
|
||||
use nac3parser::ast::{
|
||||
Constant, ExcepthandlerKind, Expr, ExprKind, Location, Stmt, StmtKind, StrRef,
|
||||
};
|
||||
|
||||
use super::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::{destructure_range, gen_binop_expr},
|
||||
gen_in_range_check,
|
||||
irrt::{handle_slice_indices, list_slice_assignment},
|
||||
macros::codegen_unreachable,
|
||||
types::{ndarray::NDArrayType, ExceptionType, RangeType},
|
||||
types::{ExceptionType, RangeType, ndarray::NDArrayType},
|
||||
values::{
|
||||
ndarray::{RustNDIndex, ScalarOrNDArray},
|
||||
ArrayLikeIndexer, ArraySliceValue, ExceptionValue, ListValue, ProxyValue,
|
||||
ndarray::{RustNDIndex, ScalarOrNDArray},
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
use crate::{
|
||||
symbol_resolver::ValueEnum,
|
||||
toplevel::{DefinitionId, TopLevelDef},
|
||||
typecheck::{
|
||||
magic_methods::Binop,
|
||||
typedef::{iter_type_vars, FunSignature, Type, TypeEnum},
|
||||
typedef::{FunSignature, Type, TypeEnum, iter_type_vars},
|
||||
},
|
||||
};
|
||||
|
||||
@ -234,7 +234,7 @@ pub fn gen_assign_target_list<'ctx, G: CodeGenerator>(
|
||||
|
||||
let a = starred_target_index; // Number of RHS values before the starred target
|
||||
let b = tuple_tys.len() - (targets.len() - 1 - starred_target_index); // Number of RHS values after the starred target
|
||||
// Thus `tuple[a..b]` is assigned to the starred target.
|
||||
// Thus `tuple[a..b]` is assigned to the starred target.
|
||||
|
||||
// Handle assignment before the starred target
|
||||
for (target, val, val_ty) in
|
||||
@ -1468,7 +1468,7 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
|
||||
ctx.outer_catch_clauses = old_clauses;
|
||||
ctx.unwind_target = old_unwind;
|
||||
ctx.return_target = old_return;
|
||||
ctx.loop_target = old_loop_target.or(ctx.loop_target).take();
|
||||
ctx.loop_target = old_loop_target.or(ctx.loop_target);
|
||||
|
||||
let old_unwind = if finalbody.is_empty() {
|
||||
None
|
||||
@ -1592,7 +1592,7 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
|
||||
}
|
||||
|
||||
ctx.unwind_target = old_unwind;
|
||||
ctx.loop_target = old_loop_target.or(ctx.loop_target).take();
|
||||
ctx.loop_target = old_loop_target.or(ctx.loop_target);
|
||||
ctx.return_target = old_return;
|
||||
|
||||
ctx.builder.position_at_end(landingpad);
|
||||
@ -1828,7 +1828,7 @@ pub fn gen_with<'ctx, 'a, G: CodeGenerator>(
|
||||
// reset old_unwind
|
||||
ctx.unwind_target = old_unwind;
|
||||
ctx.return_target = old_return;
|
||||
ctx.loop_target = old_loop_target.or(ctx.loop_target).take();
|
||||
ctx.loop_target = old_loop_target.or(ctx.loop_target);
|
||||
|
||||
let final_landingpad = ctx.ctx.append_basic_block(current_fun, "with.catch.final");
|
||||
ctx.builder.position_at_end(final_landingpad);
|
||||
@ -1882,7 +1882,7 @@ pub fn gen_with<'ctx, 'a, G: CodeGenerator>(
|
||||
let old_return = Some(return_target);
|
||||
|
||||
ctx.unwind_target = old_unwind;
|
||||
ctx.loop_target = old_loop_target.or(ctx.loop_target).take();
|
||||
ctx.loop_target = old_loop_target.or(ctx.loop_target);
|
||||
ctx.return_target = old_return;
|
||||
|
||||
ctx.builder.position_at_end(landingpad);
|
||||
|
@ -7,26 +7,26 @@ use function_name::named;
|
||||
use indexmap::IndexMap;
|
||||
use indoc::indoc;
|
||||
use inkwell::{
|
||||
targets::{InitializationConfig, Target},
|
||||
OptimizationLevel,
|
||||
targets::{InitializationConfig, Target},
|
||||
};
|
||||
use nac3parser::{
|
||||
ast::{fold::Fold, FileName, StrRef},
|
||||
ast::{FileName, StrRef, fold::Fold},
|
||||
parser::parse_program,
|
||||
};
|
||||
use parking_lot::RwLock;
|
||||
|
||||
use super::{
|
||||
concrete_type::ConcreteTypeStore,
|
||||
types::{ndarray::NDArrayType, ListType, ProxyType, RangeType},
|
||||
CodeGenContext, CodeGenLLVMOptions, CodeGenTargetMachineOptions, CodeGenTask, CodeGenerator,
|
||||
DefaultCodeGenerator, WithCall, WorkerRegistry,
|
||||
concrete_type::ConcreteTypeStore,
|
||||
types::{ListType, ProxyType, RangeType, ndarray::NDArrayType},
|
||||
};
|
||||
use crate::{
|
||||
symbol_resolver::{SymbolResolver, ValueEnum},
|
||||
toplevel::{
|
||||
composer::{ComposerConfig, TopLevelComposer},
|
||||
DefinitionId, FunInstance, TopLevelContext, TopLevelDef,
|
||||
composer::{ComposerConfig, TopLevelComposer},
|
||||
},
|
||||
typecheck::{
|
||||
type_inferencer::{FunctionData, IdentifierInfo, Inferencer, PrimitiveStore},
|
||||
|
@ -1,19 +1,19 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use super::{
|
||||
structure::{check_struct_type_matches_fields, StructField, StructFields, StructProxyType},
|
||||
ProxyType,
|
||||
structure::{StructField, StructFields, StructProxyType, check_struct_type_matches_fields},
|
||||
};
|
||||
use crate::{
|
||||
codegen::{values::ExceptionValue, CodeGenContext, CodeGenerator},
|
||||
codegen::{CodeGenContext, CodeGenerator, values::ExceptionValue},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
};
|
||||
|
||||
|
@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
context::Context,
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
@ -11,14 +11,14 @@ use nac3core_derive::StructFields;
|
||||
use super::ProxyType;
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::structure::{
|
||||
check_struct_type_matches_fields, FieldIndexCounter, StructField, StructFields,
|
||||
StructProxyType,
|
||||
FieldIndexCounter, StructField, StructFields, StructProxyType,
|
||||
check_struct_type_matches_fields,
|
||||
},
|
||||
values::ListValue,
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::{iter_type_vars, Type, TypeEnum},
|
||||
typecheck::typedef::{Type, TypeEnum, iter_type_vars},
|
||||
};
|
||||
|
||||
/// Proxy type for a `list` type in LLVM.
|
||||
|
@ -1,19 +1,18 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
types::BasicTypeEnum,
|
||||
values::{BasicValueEnum, IntValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
irrt,
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
stmt::gen_if_else_expr_callback,
|
||||
types::{ndarray::NDArrayType, ListType, ProxyType},
|
||||
types::{ListType, ProxyType, ndarray::NDArrayType},
|
||||
values::{
|
||||
ndarray::NDArrayValue, ArrayLikeValue, ArraySliceValue, ListValue, ProxyValue,
|
||||
TypedArrayLikeAdapter, TypedArrayLikeMutator,
|
||||
ArrayLikeValue, ArraySliceValue, ListValue, ProxyValue, TypedArrayLikeAdapter,
|
||||
TypedArrayLikeMutator, ndarray::NDArrayValue,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
toplevel::helper::{arraylike_flatten_element_type, arraylike_get_ndims},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
|
@ -1,20 +1,20 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
structure::{check_struct_type_matches_fields, StructField, StructFields, StructProxyType},
|
||||
ProxyType,
|
||||
structure::{StructField, StructFields, StructProxyType, check_struct_type_matches_fields},
|
||||
},
|
||||
values::ndarray::ShapeEntryValue,
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::Context,
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
@ -10,15 +10,15 @@ use nac3core_derive::StructFields;
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
structure::{
|
||||
check_struct_type_matches_fields, FieldIndexCounter, StructField, StructFields,
|
||||
StructProxyType,
|
||||
},
|
||||
ProxyType,
|
||||
structure::{
|
||||
FieldIndexCounter, StructField, StructFields, StructProxyType,
|
||||
check_struct_type_matches_fields,
|
||||
},
|
||||
},
|
||||
values::ndarray::ContiguousNDArrayValue,
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
toplevel::numpy::unpack_ndarray_var_tys,
|
||||
typecheck::typedef::Type,
|
||||
|
@ -1,12 +1,12 @@
|
||||
use inkwell::{
|
||||
values::{BasicValueEnum, IntValue},
|
||||
IntPredicate,
|
||||
values::{BasicValueEnum, IntValue},
|
||||
};
|
||||
|
||||
use super::NDArrayType;
|
||||
use crate::{
|
||||
codegen::{
|
||||
irrt, types::ProxyType, values::TypedArrayLikeAccessor, CodeGenContext, CodeGenerator,
|
||||
CodeGenContext, CodeGenerator, irrt, types::ProxyType, values::TypedArrayLikeAccessor,
|
||||
},
|
||||
typecheck::typedef::Type,
|
||||
};
|
||||
|
@ -1,23 +1,23 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
structure::{check_struct_type_matches_fields, StructField, StructFields, StructProxyType},
|
||||
ProxyType,
|
||||
structure::{StructField, StructFields, StructProxyType, check_struct_type_matches_fields},
|
||||
},
|
||||
values::{
|
||||
ndarray::{NDIndexValue, RustNDIndex},
|
||||
ArrayLikeIndexer, ArraySliceValue,
|
||||
ndarray::{NDIndexValue, RustNDIndex},
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -2,16 +2,16 @@ use inkwell::{types::BasicTypeEnum, values::BasicValueEnum};
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
stmt::gen_for_callback,
|
||||
types::{
|
||||
ndarray::{NDArrayType, NDIterType},
|
||||
ProxyType,
|
||||
ndarray::{NDArrayType, NDIterType},
|
||||
},
|
||||
values::{
|
||||
ndarray::{NDArrayOut, NDArrayValue, ScalarOrNDArray},
|
||||
ArrayLikeValue, ProxyValue,
|
||||
ndarray::{NDArrayOut, NDArrayValue, ScalarOrNDArray},
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
impl<'ctx> NDArrayType<'ctx> {
|
||||
|
@ -1,20 +1,20 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{BasicValue, IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use super::{
|
||||
structure::{check_struct_type_matches_fields, StructField, StructFields, StructProxyType},
|
||||
ProxyType,
|
||||
structure::{StructField, StructFields, StructProxyType, check_struct_type_matches_fields},
|
||||
};
|
||||
use crate::{
|
||||
codegen::{
|
||||
values::{ndarray::NDArrayValue, TypedArrayLikeMutator},
|
||||
values::{TypedArrayLikeMutator, ndarray::NDArrayValue},
|
||||
{CodeGenContext, CodeGenerator},
|
||||
},
|
||||
toplevel::{helper::extract_ndims, numpy::unpack_ndarray_var_tys},
|
||||
|
@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
@ -10,15 +10,14 @@ use nac3core_derive::StructFields;
|
||||
|
||||
use super::ProxyType;
|
||||
use crate::codegen::{
|
||||
irrt,
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
types::structure::{
|
||||
check_struct_type_matches_fields, StructField, StructFields, StructProxyType,
|
||||
StructField, StructFields, StructProxyType, check_struct_type_matches_fields,
|
||||
},
|
||||
values::{
|
||||
ndarray::{NDArrayValue, NDIterValue},
|
||||
ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAdapter,
|
||||
ndarray::{NDArrayValue, NDIterValue},
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -1,14 +1,14 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::Context,
|
||||
types::{BasicType, BasicTypeEnum, IntType, PointerType},
|
||||
values::{BasicValue, BasicValueEnum, PointerValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use super::ProxyType;
|
||||
use crate::{
|
||||
codegen::{values::OptionValue, CodeGenContext, CodeGenerator},
|
||||
typecheck::typedef::{iter_type_vars, Type, TypeEnum},
|
||||
codegen::{CodeGenContext, CodeGenerator, values::OptionValue},
|
||||
typecheck::typedef::{Type, TypeEnum, iter_type_vars},
|
||||
};
|
||||
|
||||
/// Proxy type for an `Option` type in LLVM.
|
||||
|
@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::Context,
|
||||
types::{AnyTypeEnum, ArrayType, BasicType, BasicTypeEnum, IntType, PointerType},
|
||||
values::{ArrayValue, PointerValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use super::ProxyType;
|
||||
|
@ -1,18 +1,18 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::Context,
|
||||
types::{BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{GlobalValue, IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use super::{
|
||||
structure::{check_struct_type_matches_fields, StructField, StructFields},
|
||||
ProxyType,
|
||||
structure::{StructField, StructFields, check_struct_type_matches_fields},
|
||||
};
|
||||
use crate::codegen::{values::StringValue, CodeGenContext, CodeGenerator};
|
||||
use crate::codegen::{CodeGenContext, CodeGenerator, values::StringValue};
|
||||
|
||||
/// Proxy type for a `str` type in LLVM.
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -1,10 +1,10 @@
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::AsContextRef,
|
||||
types::{BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{AggregateValueEnum, BasicValue, BasicValueEnum, IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
|
@ -7,7 +7,7 @@ use itertools::Itertools;
|
||||
|
||||
use super::ProxyType;
|
||||
use crate::{
|
||||
codegen::{values::TupleValue, CodeGenContext, CodeGenerator},
|
||||
codegen::{CodeGenContext, CodeGenerator, values::TupleValue},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
};
|
||||
|
||||
@@ -110,7 +110,7 @@ impl<'ctx> TupleType<'ctx> {
/// The caller must ensure that the index is valid.
#[must_use]
pub unsafe fn type_at_index_unchecked(&self, index: u32) -> BasicTypeEnum<'ctx> {
self.ty.get_field_type_at_index_unchecked(index)
unsafe { self.ty.get_field_type_at_index_unchecked(index) }
}

/// Constructs a [`TupleValue`] from this type by zero-initializing the tuple value.
@@ -131,10 +131,11 @@ impl<'ctx> TupleType<'ctx> {
let values = objects.into_iter().collect_vec();

assert_eq!(values.len(), self.num_elements() as usize);
assert!(values
.iter()
.enumerate()
.all(|(i, v)| { v.get_type() == unsafe { self.type_at_index_unchecked(i as u32) } }));
assert!(
values.iter().enumerate().all(|(i, v)| {
v.get_type() == unsafe { self.type_at_index_unchecked(i as u32) }
})
);

let mut value = self.construct(name);
for (i, val) in values.into_iter().enumerate() {
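Editor's note: the hunk above wraps the body of an `unsafe fn` in an explicit `unsafe { ... }` block, which is what the `unsafe_op_in_unsafe_fn` lint asks for (warn-by-default under the Rust 2024 edition). The sketch below is an illustration of that rule with made-up names, not code from this repository.

```rust
// Illustration only (not from nac3): in edition 2024 an `unsafe fn` body is no
// longer an implicit unsafe block, so unsafe operations inside it must be
// wrapped explicitly or `unsafe_op_in_unsafe_fn` fires.
pub unsafe fn read_first(ptr: *const u32) -> u32 {
    // `*ptr` is the unsafe operation; the explicit block scopes it narrowly.
    unsafe { *ptr }
}

fn main() {
    let x = 7u32;
    // Calling an `unsafe fn` still requires an unsafe block at the call site.
    let y = unsafe { read_first(&x) };
    assert_eq!(y, 7);
}
```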
@ -1,23 +1,23 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context, ContextRef},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
structure::{
|
||||
check_struct_type_matches_fields, FieldIndexCounter, StructField, StructFields,
|
||||
StructProxyType,
|
||||
},
|
||||
ProxyType,
|
||||
structure::{
|
||||
FieldIndexCounter, StructField, StructFields, StructProxyType,
|
||||
check_struct_type_matches_fields,
|
||||
},
|
||||
},
|
||||
values::utils::SliceValue,
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -1,7 +1,7 @@
|
||||
use inkwell::{
|
||||
IntPredicate,
|
||||
types::AnyTypeEnum,
|
||||
values::{BasicValueEnum, IntValue, PointerValue},
|
||||
IntPredicate,
|
||||
};
|
||||
|
||||
use crate::codegen::{CodeGenContext, CodeGenerator};
|
||||
|
@ -6,13 +6,13 @@ use itertools::Itertools;
|
||||
|
||||
use nac3parser::ast::Location;
|
||||
|
||||
use super::{structure::StructProxyValue, ProxyValue, StringValue};
|
||||
use super::{ProxyValue, StringValue, structure::StructProxyValue};
|
||||
use crate::codegen::{
|
||||
types::{
|
||||
structure::{StructField, StructProxyType},
|
||||
ExceptionType,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
ExceptionType,
|
||||
structure::{StructField, StructProxyType},
|
||||
},
|
||||
};
|
||||
|
||||
/// Proxy type for accessing an `Exception` value in LLVM.
|
||||
|
@ -1,17 +1,17 @@
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate,
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType},
|
||||
values::{BasicValueEnum, IntValue, PointerValue, StructValue},
|
||||
AddressSpace, IntPredicate,
|
||||
};
|
||||
|
||||
use super::{
|
||||
structure::StructProxyValue, ArrayLikeIndexer, ArrayLikeValue, ProxyValue,
|
||||
UntypedArrayLikeAccessor, UntypedArrayLikeMutator,
|
||||
ArrayLikeIndexer, ArrayLikeValue, ProxyValue, UntypedArrayLikeAccessor,
|
||||
UntypedArrayLikeMutator, structure::StructProxyValue,
|
||||
};
|
||||
use crate::codegen::{
|
||||
types::{
|
||||
structure::{StructField, StructProxyType},
|
||||
ListType, ProxyType,
|
||||
structure::{StructField, StructProxyType},
|
||||
},
|
||||
{CodeGenContext, CodeGenerator},
|
||||
};
|
||||
|
@ -1,6 +1,6 @@
|
||||
use inkwell::{types::IntType, values::BasicValue};
|
||||
|
||||
use super::{types::ProxyType, CodeGenContext};
|
||||
use super::{CodeGenContext, types::ProxyType};
|
||||
pub use array::*;
|
||||
pub use exception::*;
|
||||
pub use list::*;
|
||||
|
@ -5,18 +5,17 @@ use inkwell::{
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::codegen::{
|
||||
irrt,
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
types::{
|
||||
ProxyType,
|
||||
ndarray::{NDArrayType, ShapeEntryType},
|
||||
structure::{StructField, StructProxyType},
|
||||
ProxyType,
|
||||
},
|
||||
values::{
|
||||
ndarray::NDArrayValue, structure::StructProxyValue, ArrayLikeIndexer, ArrayLikeValue,
|
||||
ArraySliceValue, ProxyValue, TypedArrayLikeAccessor, TypedArrayLikeAdapter,
|
||||
TypedArrayLikeMutator,
|
||||
ArrayLikeIndexer, ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAccessor,
|
||||
TypedArrayLikeAdapter, TypedArrayLikeMutator, ndarray::NDArrayValue,
|
||||
structure::StructProxyValue,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
@ -168,9 +167,11 @@ fn broadcast_shapes<'ctx, G, Shape>(
|
||||
let llvm_usize = ctx.get_size_type();
|
||||
let llvm_shape_ty = ShapeEntryType::new(ctx);
|
||||
|
||||
assert!(in_shape_entries
|
||||
.iter()
|
||||
.all(|entry| entry.0.element_type(ctx, generator) == llvm_usize.into()));
|
||||
assert!(
|
||||
in_shape_entries
|
||||
.iter()
|
||||
.all(|entry| entry.0.element_type(ctx, generator) == llvm_usize.into())
|
||||
);
|
||||
assert_eq!(broadcast_shape.element_type(ctx, generator), llvm_usize.into());
|
||||
|
||||
// Prepare input shape entries to be passed to `call_nac3_ndarray_broadcast_shapes`.
|
||||
|
@ -1,18 +1,18 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
types::{BasicType, BasicTypeEnum, IntType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use super::NDArrayValue;
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
stmt::gen_if_callback,
|
||||
types::{
|
||||
ndarray::{ContiguousNDArrayType, NDArrayType},
|
||||
structure::{StructField, StructProxyType},
|
||||
},
|
||||
values::{structure::StructProxyValue, ArrayLikeValue, ProxyValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
values::{ArrayLikeValue, ProxyValue, structure::StructProxyValue},
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
|
@ -2,9 +2,9 @@ use inkwell::values::{BasicValue, BasicValueEnum};
|
||||
|
||||
use super::{NDArrayValue, NDIterValue, ScalarOrNDArray};
|
||||
use crate::codegen::{
|
||||
stmt::{gen_for_callback, BreakContinueHooks},
|
||||
types::ndarray::NDIterType,
|
||||
CodeGenContext, CodeGenerator,
|
||||
stmt::{BreakContinueHooks, gen_for_callback},
|
||||
types::ndarray::NDIterType,
|
||||
};
|
||||
|
||||
impl<'ctx> NDArrayValue<'ctx> {
|
||||
|
@ -1,7 +1,7 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
types::IntType,
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
@ -9,16 +9,15 @@ use nac3parser::ast::{Expr, ExprKind};
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
irrt,
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
types::{
|
||||
ndarray::{NDArrayType, NDIndexType},
|
||||
structure::{StructField, StructProxyType},
|
||||
utils::SliceType,
|
||||
},
|
||||
values::{
|
||||
ndarray::NDArrayValue, structure::StructProxyValue, utils::RustSlice, ProxyValue,
|
||||
ProxyValue, ndarray::NDArrayValue, structure::StructProxyValue, utils::RustSlice,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::Type,
|
||||
};
|
||||
|
@ -1,11 +1,11 @@
|
||||
use inkwell::{types::BasicTypeEnum, values::BasicValueEnum};
|
||||
|
||||
use crate::codegen::{
|
||||
values::{
|
||||
ndarray::{NDArrayOut, NDArrayValue, ScalarOrNDArray},
|
||||
ProxyValue,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
values::{
|
||||
ProxyValue,
|
||||
ndarray::{NDArrayOut, NDArrayValue, ScalarOrNDArray},
|
||||
},
|
||||
};
|
||||
|
||||
impl<'ctx> NDArrayValue<'ctx> {
|
||||
|
@ -5,6 +5,7 @@ use nac3parser::ast::Operator;
|
||||
use super::{NDArrayOut, NDArrayValue, RustNDIndex};
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::gen_binop_expr_with_values,
|
||||
irrt,
|
||||
stmt::gen_for_callback_incrementing,
|
||||
@ -13,7 +14,6 @@ use crate::{
|
||||
ArrayLikeValue, ArraySliceValue, TypedArrayLikeAccessor, TypedArrayLikeAdapter,
|
||||
UntypedArrayLikeAccessor, UntypedArrayLikeMutator,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
toplevel::helper::arraylike_flatten_element_type,
|
||||
typecheck::{magic_methods::Binop, typedef::Type},
|
||||
|
@ -1,29 +1,28 @@
|
||||
use std::iter::repeat_n;
|
||||
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate,
|
||||
types::{AnyType, AnyTypeEnum, BasicType, BasicTypeEnum, IntType},
|
||||
values::{BasicValue, BasicValueEnum, IntValue, PointerValue, StructValue},
|
||||
AddressSpace, IntPredicate,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use super::{
|
||||
structure::StructProxyValue, ArrayLikeIndexer, ArrayLikeValue, ProxyValue, TupleValue,
|
||||
TypedArrayLikeAccessor, TypedArrayLikeAdapter, TypedArrayLikeMutator, UntypedArrayLikeAccessor,
|
||||
UntypedArrayLikeMutator,
|
||||
ArrayLikeIndexer, ArrayLikeValue, ProxyValue, TupleValue, TypedArrayLikeAccessor,
|
||||
TypedArrayLikeAdapter, TypedArrayLikeMutator, UntypedArrayLikeAccessor,
|
||||
UntypedArrayLikeMutator, structure::StructProxyValue,
|
||||
};
|
||||
use crate::{
|
||||
codegen::{
|
||||
irrt,
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
llvm_intrinsics::{call_int_umin, call_memcpy_generic_array},
|
||||
stmt::gen_for_callback_incrementing,
|
||||
type_aligned_alloca,
|
||||
types::{
|
||||
TupleType,
|
||||
ndarray::NDArrayType,
|
||||
structure::{StructField, StructProxyType},
|
||||
TupleType,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
};
|
||||
|
@ -1,19 +1,18 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
types::{BasicType, IntType},
|
||||
values::{BasicValueEnum, IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use super::NDArrayValue;
|
||||
use crate::codegen::{
|
||||
irrt,
|
||||
stmt::{gen_for_callback, BreakContinueHooks},
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
stmt::{BreakContinueHooks, gen_for_callback},
|
||||
types::{
|
||||
ndarray::NDIterType,
|
||||
structure::{StructField, StructProxyType},
|
||||
},
|
||||
values::{structure::StructProxyValue, ArraySliceValue, ProxyValue, TypedArrayLikeAdapter},
|
||||
CodeGenContext, CodeGenerator,
|
||||
values::{ArraySliceValue, ProxyValue, TypedArrayLikeAdapter, structure::StructProxyValue},
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
|
@ -2,13 +2,13 @@ use inkwell::values::{BasicValueEnum, IntValue};
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
stmt::gen_for_callback_incrementing,
|
||||
types::{ListType, TupleType},
|
||||
values::{
|
||||
ArraySliceValue, ProxyValue, TypedArrayLikeAccessor, TypedArrayLikeAdapter,
|
||||
TypedArrayLikeMutator, UntypedArrayLikeAccessor,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
};
|
||||
@@ -29,7 +29,7 @@ pub fn parse_numpy_int_sequence<'ctx, G: CodeGenerator + ?Sized>(
generator: &mut G,
ctx: &mut CodeGenContext<'ctx, '_>,
(input_seq_ty, input_seq): (Type, BasicValueEnum<'ctx>),
) -> impl TypedArrayLikeAccessor<'ctx, G, IntValue<'ctx>> {
) -> impl TypedArrayLikeAccessor<'ctx, G, IntValue<'ctx>> + use<'ctx, G> {
let llvm_usize = ctx.get_size_type();
let zero = llvm_usize.const_zero();
let one = llvm_usize.const_int(1, false);
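Editor's note: the `+ use<'ctx, G>` added above is Rust's precise-capturing syntax for `impl Trait` return types, which spells out exactly which lifetimes and type parameters the opaque type may capture. The snippet below is a generic illustration of the syntax with hypothetical names, not the project's API.

```rust
// Illustration only: `use<'a, T>` lists the generics the opaque return type is
// allowed to capture; anything omitted cannot appear in the hidden type.
fn iter_items<'a, T>(items: &'a [T]) -> impl Iterator<Item = &'a T> + use<'a, T> {
    items.iter()
}

fn main() {
    let v = vec![1, 2, 3];
    let mut it = iter_items(&v);
    assert_eq!(it.next(), Some(&1));
}
```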
@ -4,14 +4,13 @@ use inkwell::values::{IntValue, PointerValue};
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::codegen::{
|
||||
irrt,
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
stmt::gen_if_callback,
|
||||
types::ndarray::NDArrayType,
|
||||
values::{
|
||||
ndarray::{NDArrayValue, RustNDIndex},
|
||||
ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAccessor, TypedArrayLikeAdapter,
|
||||
ndarray::{NDArrayValue, RustNDIndex},
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
impl<'ctx> NDArrayValue<'ctx> {
|
||||
|
@ -4,7 +4,7 @@ use inkwell::{
|
||||
};
|
||||
|
||||
use super::ProxyValue;
|
||||
use crate::codegen::{types::OptionType, CodeGenContext};
|
||||
use crate::codegen::{CodeGenContext, types::OptionType};
|
||||
|
||||
/// Proxy type for accessing a `Option` value in LLVM.
|
||||
#[derive(Copy, Clone)]
|
||||
|
@ -4,7 +4,7 @@ use inkwell::{
|
||||
};
|
||||
|
||||
use super::ProxyValue;
|
||||
use crate::codegen::{types::RangeType, CodeGenContext, CodeGenerator};
|
||||
use crate::codegen::{CodeGenContext, CodeGenerator, types::RangeType};
|
||||
|
||||
/// Proxy type for accessing a `range` value in LLVM.
|
||||
#[derive(Copy, Clone)]
|
||||
|
@ -4,9 +4,9 @@ use inkwell::{
|
||||
};
|
||||
|
||||
use crate::codegen::{
|
||||
types::{structure::StructField, StringType},
|
||||
values::ProxyValue,
|
||||
CodeGenContext,
|
||||
types::{StringType, structure::StructField},
|
||||
values::ProxyValue,
|
||||
};
|
||||
|
||||
/// Proxy type for accessing a `str` value in LLVM.
|
||||
|
@ -1,7 +1,7 @@
|
||||
use inkwell::values::{BasicValueEnum, PointerValue, StructValue};
|
||||
|
||||
use super::ProxyValue;
|
||||
use crate::codegen::{types::structure::StructProxyType, CodeGenContext};
|
||||
use crate::codegen::{CodeGenContext, types::structure::StructProxyType};
|
||||
|
||||
/// An LLVM value that is used to represent a corresponding structure-like value in NAC3.
|
||||
pub trait StructProxyValue<'ctx>:
|
||||
|
@ -4,7 +4,7 @@ use inkwell::{
|
||||
};
|
||||
|
||||
use super::ProxyValue;
|
||||
use crate::codegen::{types::TupleType, CodeGenContext};
|
||||
use crate::codegen::{CodeGenContext, types::TupleType};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct TupleValue<'ctx> {
|
||||
|
@ -7,12 +7,12 @@ use nac3parser::ast::Expr;
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
structure::{StructField, StructProxyType},
|
||||
utils::SliceType,
|
||||
},
|
||||
values::{structure::StructProxyValue, ProxyValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
values::{ProxyValue, structure::StructProxyValue},
|
||||
},
|
||||
typecheck::typedef::Type,
|
||||
};
|
||||
|
@ -6,14 +6,14 @@ use std::{
|
||||
};
|
||||
|
||||
use inkwell::values::{BasicValueEnum, FloatValue, IntValue, PointerValue, StructValue};
|
||||
use itertools::{izip, Itertools};
|
||||
use itertools::{Itertools, izip};
|
||||
use parking_lot::RwLock;
|
||||
|
||||
use nac3parser::ast::{Constant, Expr, Location, StrRef};
|
||||
|
||||
use crate::{
|
||||
codegen::{CodeGenContext, CodeGenerator},
|
||||
toplevel::{type_annotation::TypeAnnotation, DefinitionId, TopLevelDef},
|
||||
toplevel::{DefinitionId, TopLevelDef, type_annotation::TypeAnnotation},
|
||||
typecheck::{
|
||||
type_inferencer::PrimitiveStore,
|
||||
typedef::{Type, TypeEnum, Unifier, VarMap},
|
||||
|
@ -1,13 +1,13 @@
|
||||
use std::iter::once;
|
||||
|
||||
use indexmap::IndexMap;
|
||||
use inkwell::{values::BasicValue, IntPredicate};
|
||||
use inkwell::{IntPredicate, values::BasicValue};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use super::{
|
||||
helper::{
|
||||
arraylike_flatten_element_type, debug_assert_prim_is_allowed, extract_ndims,
|
||||
make_exception_fields, PrimDef, PrimDefDetails,
|
||||
PrimDef, PrimDefDetails, arraylike_flatten_element_type, debug_assert_prim_is_allowed,
|
||||
extract_ndims, make_exception_fields,
|
||||
},
|
||||
numpy::{make_ndarray_ty, unpack_ndarray_var_tys},
|
||||
*,
|
||||
@ -17,14 +17,14 @@ use crate::{
|
||||
builtin_fns,
|
||||
numpy::*,
|
||||
stmt::{exn_constructor, gen_if_callback},
|
||||
types::{ndarray::NDArrayType, RangeType},
|
||||
types::{RangeType, ndarray::NDArrayType},
|
||||
values::{
|
||||
ndarray::{shape::parse_numpy_int_sequence, ScalarOrNDArray},
|
||||
ProxyValue,
|
||||
ndarray::{ScalarOrNDArray, shape::parse_numpy_int_sequence},
|
||||
},
|
||||
},
|
||||
symbol_resolver::SymbolValue,
|
||||
typecheck::typedef::{into_var_map, iter_type_vars, TypeVar, VarMap},
|
||||
typecheck::typedef::{TypeVar, VarMap, into_var_map, iter_type_vars},
|
||||
};
|
||||
|
||||
type BuiltinInfo = Vec<(Arc<RwLock<TopLevelDef>>, Option<Stmt>)>;
|
||||
@ -479,7 +479,9 @@ impl<'a> BuiltinBuilder<'a> {
|
||||
assert_eq!(simple_name, &exp_simple_name.into());
|
||||
}
|
||||
_ => {
|
||||
panic!("Class/function variant of the constructed TopLevelDef of PrimDef {prim:?} is different than what is defined by {prim:?}")
|
||||
panic!(
|
||||
"Class/function variant of the constructed TopLevelDef of PrimDef {prim:?} is different than what is defined by {prim:?}"
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use indexmap::IndexMap;
|
||||
use nac3parser::ast::{fold::Fold, ExprKind, Ident};
|
||||
use nac3parser::ast::{ExprKind, Ident, fold::Fold};
|
||||
|
||||
use super::*;
|
||||
use crate::{
|
||||
@ -265,8 +265,7 @@ impl TopLevelComposer {
|
||||
if self.keyword_list.contains(class_name) {
|
||||
return Err(format!(
|
||||
"cannot use keyword `{}` as a class name (at {})",
|
||||
class_name,
|
||||
ast.location
|
||||
class_name, ast.location
|
||||
));
|
||||
}
|
||||
let fully_qualified_class_name = if mod_path.is_empty() {
|
||||
@ -277,8 +276,7 @@ impl TopLevelComposer {
|
||||
if !defined_names.insert(fully_qualified_class_name.into()) {
|
||||
return Err(format!(
|
||||
"duplicate definition of class `{}` (at {})",
|
||||
class_name,
|
||||
ast.location
|
||||
class_name, ast.location
|
||||
));
|
||||
}
|
||||
|
||||
@ -294,7 +292,7 @@ impl TopLevelComposer {
|
||||
resolver.clone(),
|
||||
fully_qualified_class_name,
|
||||
Some(constructor_ty),
|
||||
Some(ast.location)
|
||||
Some(ast.location),
|
||||
))),
|
||||
None,
|
||||
);
|
||||
@ -321,8 +319,7 @@ impl TopLevelComposer {
|
||||
if self.keyword_list.contains(method_name) {
|
||||
return Err(format!(
|
||||
"cannot use keyword `{}` as a method name (at {})",
|
||||
method_name,
|
||||
b.location
|
||||
method_name, b.location
|
||||
));
|
||||
}
|
||||
let global_class_method_name = Self::make_class_method_name(
|
||||
@ -332,8 +329,7 @@ impl TopLevelComposer {
|
||||
if !defined_names.insert(global_class_method_name.clone()) {
|
||||
return Err(format!(
|
||||
"class method `{}` defined twice (at {})",
|
||||
global_class_method_name,
|
||||
b.location
|
||||
global_class_method_name, b.location
|
||||
));
|
||||
}
|
||||
let method_def_id = self.definition_ast_list.len() + {
|
||||
@ -380,7 +376,11 @@ impl TopLevelComposer {
|
||||
self.definition_ast_list.push((def, Some(ast)));
|
||||
}
|
||||
|
||||
let result_ty = if allow_no_constructor || contains_constructor { Some(constructor_ty) } else { None };
|
||||
let result_ty = if allow_no_constructor || contains_constructor {
|
||||
Some(constructor_ty)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok((class_name, DefinitionId(class_def_id), result_ty))
|
||||
}
|
||||
|
||||
@ -393,8 +393,7 @@ impl TopLevelComposer {
|
||||
if !defined_names.insert(global_fun_name.clone()) {
|
||||
return Err(format!(
|
||||
"top level function `{}` defined twice (at {})",
|
||||
global_fun_name,
|
||||
ast.location
|
||||
global_fun_name, ast.location
|
||||
));
|
||||
}
|
||||
|
||||
@ -408,7 +407,7 @@ impl TopLevelComposer {
|
||||
// dummy here, unify with correct type later
|
||||
ty_to_be_unified,
|
||||
resolver,
|
||||
Some(ast.location)
|
||||
Some(ast.location),
|
||||
))
|
||||
.into(),
|
||||
Some(ast),
|
||||
@ -432,7 +431,10 @@ impl TopLevelComposer {
|
||||
// Make callers use `register_top_level_var` instead, as it provides more
|
||||
// fine-grained control over which symbols to register, while also simplifying the
|
||||
// usage of this function.
|
||||
panic!("Registration of top-level Assign statements must use TopLevelComposer::register_top_level_var (at {})", ast.location);
|
||||
panic!(
|
||||
"Registration of top-level Assign statements must use TopLevelComposer::register_top_level_var (at {})",
|
||||
ast.location
|
||||
);
|
||||
}
|
||||
|
||||
ast::StmtKind::AnnAssign { target, annotation, .. } => {
|
||||
@ -1405,14 +1407,14 @@ impl TopLevelComposer {
|
||||
);
|
||||
if !ok {
|
||||
return Err(HashSet::from([format!(
|
||||
"method {class_method_name} has same name as ancestors' method, but incompatible type"),
|
||||
]));
|
||||
"method {class_method_name} has same name as ancestors' method, but incompatible type"
|
||||
)]));
|
||||
}
|
||||
}
|
||||
}
|
||||
class_methods_def.clear();
|
||||
class_methods_def
|
||||
.extend(new_child_methods.iter().map(|f| (*f.0, f.1 .0, f.1 .1)).collect_vec());
|
||||
.extend(new_child_methods.iter().map(|f| (*f.0, f.1.0, f.1.1)).collect_vec());
|
||||
|
||||
// handle class fields
|
||||
let mut new_child_fields: IndexMap<StrRef, (Type, bool)> =
|
||||
@ -1441,10 +1443,10 @@ impl TopLevelComposer {
|
||||
|
||||
class_fields_def.clear();
|
||||
class_fields_def
|
||||
.extend(new_child_fields.iter().map(|f| (*f.0, f.1 .0, f.1 .1)).collect_vec());
|
||||
.extend(new_child_fields.iter().map(|f| (*f.0, f.1.0, f.1.1)).collect_vec());
|
||||
class_attribute_def.clear();
|
||||
class_attribute_def.extend(
|
||||
new_child_attributes.iter().map(|f| (*f.0, f.1 .0, f.1 .1.clone())).collect_vec(),
|
||||
new_child_attributes.iter().map(|f| (*f.0, f.1.0, f.1.1.clone())).collect_vec(),
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
@ -1621,14 +1623,10 @@ impl TopLevelComposer {
|
||||
)?;
|
||||
for (f, _, _) in fields {
|
||||
if !all_inited.contains(f) {
|
||||
return Err(HashSet::from([
|
||||
format!(
|
||||
"fields `{}` of class `{}` not fully initialized in the initializer (at {})",
|
||||
f,
|
||||
class_name,
|
||||
body[0].location,
|
||||
),
|
||||
]));
|
||||
return Err(HashSet::from([format!(
|
||||
"fields `{}` of class `{}` not fully initialized in the initializer (at {})",
|
||||
f, class_name, body[0].location,
|
||||
)]));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1900,8 +1898,8 @@ impl TopLevelComposer {
|
||||
let base_repr = inferencer.unifier.stringify(*base);
|
||||
let subtype_repr = inferencer.unifier.stringify(*subtype);
|
||||
return Err(HashSet::from([format!(
|
||||
"Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})"),
|
||||
]));
|
||||
"Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})"
|
||||
)]));
|
||||
}
|
||||
};
|
||||
let subtype_entry = defs[subtype_id.0].read();
|
||||
@ -1915,8 +1913,8 @@ impl TopLevelComposer {
|
||||
let base_repr = inferencer.unifier.stringify(*base);
|
||||
let subtype_repr = inferencer.unifier.stringify(*subtype);
|
||||
return Err(HashSet::from([format!(
|
||||
"Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})"),
|
||||
]));
|
||||
"Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})"
|
||||
)]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -8,7 +8,7 @@ use nac3parser::ast::{Constant, ExprKind, Location};
|
||||
use super::{numpy::unpack_ndarray_var_tys, *};
|
||||
use crate::{
|
||||
symbol_resolver::SymbolValue,
|
||||
typecheck::typedef::{into_var_map, iter_type_vars, Mapping, TypeVarId, VarMap},
|
||||
typecheck::typedef::{Mapping, TypeVarId, VarMap, into_var_map, iter_type_vars},
|
||||
};
|
||||
|
||||
/// All primitive types and functions in nac3core.
|
||||
@ -757,7 +757,7 @@ impl TopLevelComposer {
|
||||
return Err(HashSet::from([format!(
|
||||
"redundant type annotation for class fields at {}",
|
||||
s.location
|
||||
)]))
|
||||
)]));
|
||||
}
|
||||
ast::StmtKind::Assign { targets, .. } => {
|
||||
for t in targets {
|
||||
@ -1038,7 +1038,10 @@ impl TopLevelComposer {
|
||||
}
|
||||
ast::ExprKind::Name { .. } | ast::ExprKind::Subscript { .. } => {
|
||||
if has_base {
|
||||
return Err(HashSet::from([format!("a class definition can only have at most one base class declaration and one generic declaration (at {})", b.location )]));
|
||||
return Err(HashSet::from([format!(
|
||||
"a class definition can only have at most one base class declaration and one generic declaration (at {})",
|
||||
b.location
|
||||
)]));
|
||||
}
|
||||
has_base = true;
|
||||
// the function parse_ast_to make sure that no type var occurred in
|
||||
@ -1233,7 +1236,9 @@ pub fn arraylike_get_ndims(unifier: &mut Unifier, ty: Type) -> u64 {
|
||||
};
|
||||
|
||||
if values.len() > 1 {
|
||||
todo!("Getting num of dimensions for ndarray with more than one ndim bound is unimplemented")
|
||||
todo!(
|
||||
"Getting num of dimensions for ndarray with more than one ndim bound is unimplemented"
|
||||
)
|
||||
}
|
||||
|
||||
u64::try_from(values[0].clone()).unwrap()
|
||||
|
@ -5,11 +5,11 @@ use parking_lot::Mutex;
|
||||
use test_case::test_case;
|
||||
|
||||
use nac3parser::{
|
||||
ast::{fold::Fold, FileName},
|
||||
ast::{FileName, fold::Fold},
|
||||
parser::parse_program,
|
||||
};
|
||||
|
||||
use super::{helper::PrimDef, DefinitionId, *};
|
||||
use super::{DefinitionId, helper::PrimDef, *};
|
||||
use crate::{
|
||||
codegen::CodeGenContext,
|
||||
symbol_resolver::{SymbolResolver, ValueEnum},
|
||||
|
@ -43,11 +43,7 @@ impl TypeAnnotation {
|
||||
format!("{}{}", class_name, {
|
||||
let param_list =
|
||||
params.iter().map(|p| p.stringify(unifier)).collect_vec().join(", ");
|
||||
if param_list.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("[{param_list}]")
|
||||
}
|
||||
if param_list.is_empty() { String::new() } else { format!("[{param_list}]") }
|
||||
})
|
||||
}
|
||||
Literal(values) => {
|
||||
@ -214,12 +210,10 @@ pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(
|
||||
if no_type_var {
|
||||
result
|
||||
} else {
|
||||
return Err(HashSet::from([
|
||||
format!(
|
||||
"application of type vars to generic class is not currently supported (at {})",
|
||||
params_ast[0].location
|
||||
),
|
||||
]));
|
||||
return Err(HashSet::from([format!(
|
||||
"application of type vars to generic class is not currently supported (at {})",
|
||||
params_ast[0].location
|
||||
)]));
|
||||
}
|
||||
};
|
||||
Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
|
||||
|
@ -139,7 +139,7 @@ impl Inferencer<'_> {
|
||||
return Err(HashSet::from([format!(
|
||||
"type error at identifier `{}` ({}) at {}",
|
||||
id, e, expr.location
|
||||
)]))
|
||||
)]));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -376,13 +376,11 @@ impl Inferencer<'_> {
|
||||
}
|
||||
|
||||
if !self.check_return_value_ty(ret_ty) {
|
||||
return Err(HashSet::from([
|
||||
format!(
|
||||
"return value of type {} must be a primitive or a tuple of primitives at {}",
|
||||
self.unifier.stringify(ret_ty),
|
||||
value.location,
|
||||
),
|
||||
]));
|
||||
return Err(HashSet::from([format!(
|
||||
"return value of type {} must be a primitive or a tuple of primitives at {}",
|
||||
self.unifier.stringify(ret_ty),
|
||||
value.location,
|
||||
)]));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -425,7 +423,7 @@ impl Inferencer<'_> {
|
||||
return Err(HashSet::from([format!(
|
||||
"type error at identifier `{}` ({}) at {}",
|
||||
id, e, stmt.location
|
||||
)]))
|
||||
)]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,18 +1,18 @@
|
||||
use std::{cmp::max, collections::HashMap, rc::Rc};
|
||||
|
||||
use itertools::{iproduct, Itertools};
|
||||
use itertools::{Itertools, iproduct};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use nac3parser::ast::{Cmpop, Operator, StrRef, Unaryop};
|
||||
|
||||
use super::{
|
||||
type_inferencer::*,
|
||||
typedef::{into_var_map, FunSignature, FuncArg, Type, TypeEnum, Unifier, VarMap},
|
||||
typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier, VarMap, into_var_map},
|
||||
};
|
||||
use crate::{
|
||||
symbol_resolver::SymbolValue,
|
||||
toplevel::{
|
||||
helper::{extract_ndims, PrimDef},
|
||||
helper::{PrimDef, extract_ndims},
|
||||
numpy::{make_ndarray_ty, unpack_ndarray_var_tys},
|
||||
},
|
||||
};
|
||||
@ -498,11 +498,7 @@ pub fn typeof_binop(
|
||||
));
|
||||
}
|
||||
|
||||
if is_left_list {
|
||||
lhs
|
||||
} else {
|
||||
rhs
|
||||
}
|
||||
if is_left_list { lhs } else { rhs }
|
||||
} else if is_left_ndarray || is_right_ndarray {
|
||||
typeof_ndarray_broadcast(unifier, primitives, lhs, rhs)?
|
||||
} else if unifier.unioned(lhs, rhs) {
|
||||
@ -526,7 +522,9 @@ pub fn typeof_binop(
|
||||
_ => {
|
||||
let lhs_str = unifier.stringify(lhs);
|
||||
let rhs_str = unifier.stringify(rhs);
|
||||
return Err(format!("ndarray.__matmul__ only accepts ndarray operands, but left operand has type {lhs_str}, and right operand has type {rhs_str}"));
|
||||
return Err(format!(
|
||||
"ndarray.__matmul__ only accepts ndarray operands, but left operand has type {lhs_str}, and right operand has type {rhs_str}"
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@ -552,7 +550,7 @@ pub fn typeof_binop(
|
||||
(0, _) | (_, 0) => {
|
||||
return Err(
|
||||
"ndarray.__matmul__ does not allow unsized ndarray input".to_string()
|
||||
)
|
||||
);
|
||||
}
|
||||
(1, 1) => 0,
|
||||
(1, _) => rhs_ndims - 1,
|
||||
|
@ -108,7 +108,10 @@ impl Display for DisplayTypeError<'_> {
|
||||
let expected_count = expected_min_count; // or expected_max_count
|
||||
write!(f, "Too many arguments. Expected {expected_count} but got {got_count}")
|
||||
} else {
|
||||
write!(f, "Too many arguments. Expected {expected_min_count} to {expected_max_count} arguments but got {got_count}")
|
||||
write!(
|
||||
f,
|
||||
"Too many arguments. Expected {expected_min_count} to {expected_max_count} arguments but got {got_count}"
|
||||
)
|
||||
}
|
||||
}
|
||||
MissingArgs { missing_arg_names } => {
|
||||
@ -123,7 +126,10 @@ impl Display for DisplayTypeError<'_> {
|
||||
let expected_rhs_type_str =
|
||||
self.unifier.stringify_with_notes(*expected_rhs_type, &mut notes);
|
||||
|
||||
write!(f, "Unsupported operand type(s) for {op_symbol}: '{lhs_type_str}' and '{rhs_type_str}' (right operand should have type {expected_rhs_type_str})")
|
||||
write!(
|
||||
f,
|
||||
"Unsupported operand type(s) for {op_symbol}: '{lhs_type_str}' and '{rhs_type_str}' (right operand should have type {expected_rhs_type_str})"
|
||||
)
|
||||
}
|
||||
UnsupportedComparsionOpTypes { operator, lhs_type, rhs_type, expected_rhs_type } => {
|
||||
let op_symbol = operator.op_info().symbol;
|
||||
@ -133,7 +139,10 @@ impl Display for DisplayTypeError<'_> {
|
||||
let expected_rhs_type_str =
|
||||
self.unifier.stringify_with_notes(*expected_rhs_type, &mut notes);
|
||||
|
||||
write!(f, "'{op_symbol}' not supported between instances of '{lhs_type_str}' and '{rhs_type_str}' (right operand should have type {expected_rhs_type_str})")
|
||||
write!(
|
||||
f,
|
||||
"'{op_symbol}' not supported between instances of '{lhs_type_str}' and '{rhs_type_str}' (right operand should have type {expected_rhs_type_str})"
|
||||
)
|
||||
}
|
||||
UnknownArgName(name) => {
|
||||
write!(f, "Unknown argument name: {name}")
|
||||
@ -141,7 +150,10 @@ impl Display for DisplayTypeError<'_> {
|
||||
IncorrectArgType { name, expected, got } => {
|
||||
let expected = self.unifier.stringify_with_notes(*expected, &mut notes);
|
||||
let got = self.unifier.stringify_with_notes(*got, &mut notes);
|
||||
write!(f, "Incorrect argument type for parameter {name}. Expected {expected}, but got {got}")
|
||||
write!(
|
||||
f,
|
||||
"Incorrect argument type for parameter {name}. Expected {expected}, but got {got}"
|
||||
)
|
||||
}
|
||||
FieldUnificationError { field, types, loc } => {
|
||||
let lhs = self.unifier.stringify_with_notes(types.0, &mut notes);
|
||||
|
@ -7,29 +7,28 @@ use std::{
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use itertools::{izip, Itertools};
|
||||
use itertools::{Itertools, izip};
|
||||
|
||||
use nac3parser::ast::{
|
||||
self,
|
||||
self, Arguments, Comprehension, ExprContext, ExprKind, Ident, Located, Location, StrRef,
|
||||
fold::{self, Fold},
|
||||
Arguments, Comprehension, ExprContext, ExprKind, Ident, Located, Location, StrRef,
|
||||
};
|
||||
|
||||
use super::{
|
||||
magic_methods::*,
|
||||
type_error::{TypeError, TypeErrorKind},
|
||||
typedef::{
|
||||
into_var_map, iter_type_vars, Call, CallId, FunSignature, FuncArg, Mapping, OperatorInfo,
|
||||
RecordField, RecordKey, Type, TypeEnum, TypeVar, Unifier, VarMap,
|
||||
Call, CallId, FunSignature, FuncArg, Mapping, OperatorInfo, RecordField, RecordKey, Type,
|
||||
TypeEnum, TypeVar, Unifier, VarMap, into_var_map, iter_type_vars,
|
||||
},
|
||||
};
|
||||
use crate::{
|
||||
symbol_resolver::{SymbolResolver, SymbolValue},
|
||||
toplevel::{
|
||||
helper::{arraylike_flatten_element_type, arraylike_get_ndims, PrimDef},
|
||||
TopLevelContext, TopLevelDef,
|
||||
helper::{PrimDef, arraylike_flatten_element_type, arraylike_get_ndims},
|
||||
numpy::{make_ndarray_ty, unpack_ndarray_var_tys},
|
||||
type_annotation::TypeAnnotation,
|
||||
TopLevelContext, TopLevelDef,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1018,13 +1017,11 @@ impl Inferencer<'_> {
 // This means the user is passing an expression of type `List`,
 // but it is done so indirectly (like putting a variable referencing a `List`)
 // rather than writing a List literal. We need to report an error.
-return Err(HashSet::from([
-format!(
-"Expected list literal, tuple, or int32 for argument {arg_num} of {id} at {location}. Input argument is of type list but not a list literal.",
-arg_num = arg_index + 1,
-location = shape.location
-)
-]));
+return Err(HashSet::from([format!(
+"Expected list literal, tuple, or int32 for argument {arg_num} of {id} at {location}. Input argument is of type list but not a list literal.",
+arg_num = arg_index + 1,
+location = shape.location
+)]));
 }
 }
 TypeEnum::TTuple { ty: tuple_element_types, .. } => {
@@ -1143,7 +1140,7 @@ impl Inferencer<'_> {
 )
 .as_str(),
 obj.location,
-)
+);
 }
 }

@@ -2277,7 +2274,7 @@ impl Inferencer<'_> {
 targets.len() - 1,
 rhs_tys.len()
 ),
-*target_list_location
+*target_list_location,
 );
 }

@@ -2335,7 +2332,7 @@ impl Inferencer<'_> {
 targets.len() - 1,
 rhs_tys.len()
 ),
-*target_list_location
+*target_list_location,
 );
 }

@@ -2562,7 +2559,9 @@ impl Inferencer<'_> {

 if new_ndims_values.iter().any(|v| *v == 0) {
 // TODO: Difficult to implement since now the return may both be a scalar type, or an ndarray type.
-unimplemented!("Inference for ndarray subscript operator with Literal[0, ...] bound unimplemented")
+unimplemented!(
+"Inference for ndarray subscript operator with Literal[0, ...] bound unimplemented"
+)
 }

 let new_ndarray_ty =
@@ -11,7 +11,7 @@ use super::*;
 use crate::{
 codegen::{CodeGenContext, CodeGenerator},
 symbol_resolver::ValueEnum,
-toplevel::{helper::PrimDef, DefinitionId, TopLevelDef},
+toplevel::{DefinitionId, TopLevelDef, helper::PrimDef},
 typecheck::{magic_methods::with_fields, typedef::*},
 };

@@ -21,7 +21,7 @@ use super::{
 };
 use crate::{
 symbol_resolver::SymbolValue,
-toplevel::{helper::PrimDef, DefinitionId, TopLevelContext, TopLevelDef},
+toplevel::{DefinitionId, TopLevelContext, TopLevelDef, helper::PrimDef},
 };

 #[cfg(test)]
@@ -410,11 +410,7 @@ impl Unifier {

 pub fn get_call_signature(&mut self, id: CallId) -> Option<FunSignature> {
 let fun = self.calls.get(id.0).unwrap().fun.borrow().unwrap();
-if let TypeEnum::TFunc(sign) = &*self.get_ty(fun) {
-Some(sign.clone())
-} else {
-None
-}
+if let TypeEnum::TFunc(sign) = &*self.get_ty(fun) { Some(sign.clone()) } else { None }
 }

 #[must_use]
@@ -1224,7 +1220,7 @@ impl Unifier {
 return Err(TypeError::new(
 TypeErrorKind::NoSuchField(*k, b),
 field.loc,
-))
+));
 }
 }
 }
@@ -357,7 +357,7 @@ fn test_invalid_unification(
 pairs.push((t1, t2));
 }
 let (t1, t2) =
-(env.parse(erroneous_pair.0 .0, &mapping), env.parse(erroneous_pair.0 .1, &mapping));
+(env.parse(erroneous_pair.0.0, &mapping), env.parse(erroneous_pair.0.1, &mapping));
 for (a, b) in pairs {
 env.unifier.unify(a, b).unwrap();
 }
@@ -121,7 +121,7 @@ impl<V> UnificationTable<V> {
 let (log_len, generation) = snapshot;
 assert!(self.log.len() >= log_len, "snapshot restoration error");
 assert!(
-matches!(self.log[log_len - 1], Action::Marker { generation: gen } if gen == generation),
+matches!(self.log[log_len - 1], Action::Marker { generation: r#gen } if r#gen == generation),
 "snapshot restoration error"
 );
 for action in self.log.drain(log_len - 1..).rev() {
@@ -144,7 +144,7 @@ impl<V> UnificationTable<V> {
 let (log_len, generation) = snapshot;
 assert!(self.log.len() >= log_len, "snapshot discard error");
 assert!(
-matches!(self.log[log_len - 1], Action::Marker { generation: gen } if gen == generation),
+matches!(self.log[log_len - 1], Action::Marker { generation: r#gen } if r#gen == generation),
 "snapshot discard error"
 );
 self.log.clear();
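The only semantic content in these two hunks is the rename `gen` → `r#gen`: `gen` becomes a reserved keyword in Rust edition 2024, so a pattern binding with that name has to use the raw-identifier form. A minimal sketch outside the diff's own types (the `Marker` struct below is illustrative, not the crate's `Action::Marker`):

```rust
// Edition 2024: `gen` is reserved, so the binding is spelled `r#gen`.
struct Marker {
    generation: u64,
}

fn matches_generation(m: &Marker, generation: u64) -> bool {
    matches!(m, Marker { generation: r#gen } if *r#gen == generation)
}

fn main() {
    assert!(matches_generation(&Marker { generation: 7 }, 7));
    assert!(!matches_generation(&Marker { generation: 7 }, 8));
}
```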
@@ -2,7 +2,7 @@
 name = "nac3ld"
 version = "0.1.0"
 authors = ["M-Labs"]
-edition = "2021"
+edition = "2024"

 [dependencies]
 byteorder = { version = "1.5", default-features = false }
@@ -192,11 +192,7 @@ fn read_encoded_pointer_with_pc(reader: &mut DwarfReader, encoding: u8) -> Resul

 #[inline]
 fn round_up(unrounded: usize, align: usize) -> Result<usize, ()> {
-if align.is_power_of_two() {
-Ok((unrounded + align - 1) & !(align - 1))
-} else {
-Err(())
-}
+if align.is_power_of_two() { Ok((unrounded + align - 1) & !(align - 1)) } else { Err(()) }
 }

 /// Minimalistic structure to store everything needed for parsing FDEs to synthesize `.eh_frame_hdr`
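`round_up` is the standard power-of-two rounding trick: adding `align - 1` and masking with `!(align - 1)` rounds up without a division, which is why the power-of-two check is required. A small self-contained check of the same expression:

```rust
/// Round `unrounded` up to the next multiple of `align`; only valid when
/// `align` is a power of two, so that `!(align - 1)` is a low-bit mask.
fn round_up(unrounded: usize, align: usize) -> Result<usize, ()> {
    if align.is_power_of_two() { Ok((unrounded + align - 1) & !(align - 1)) } else { Err(()) }
}

fn main() {
    assert_eq!(round_up(13, 8), Ok(16));
    assert_eq!(round_up(16, 8), Ok(16)); // already aligned: unchanged
    assert_eq!(round_up(5, 3), Err(())); // 3 is not a power of two
}
```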
@@ -209,8 +205,8 @@ pub struct EH_Frame<'a> {
 }

 impl<'a> EH_Frame<'a> {
-/// Creates an [EH_Frame] using the bytes in the `.eh_frame` section and its address in the ELF
-/// file.
+/// Creates an [`EH_Frame`] using the bytes in the `.eh_frame` section and its address in the
+/// ELF file.
 pub fn new(eh_frame_slice: &[u8], eh_frame_addr: u32) -> EH_Frame {
 EH_Frame { reader: DwarfReader::new(eh_frame_slice, eh_frame_addr) }
 }
@@ -307,7 +303,7 @@ impl<'a> CFI_Record<'a> {
 Ok(CFI_Record { fde_pointer_encoding, fde_reader })
 }

-/// Returns a [DwarfReader] initialized to the first Frame Description Entry (FDE) of this CFI
+/// Returns a [`DwarfReader`] initialized to the first Frame Description Entry (FDE) of this CFI
 /// record.
 pub fn get_fde_reader(&self) -> DwarfReader<'a> {
 self.fde_reader.clone()
@@ -424,7 +420,8 @@ pub struct EH_Frame_Hdr<'a> {
 }

 impl EH_Frame_Hdr<'_> {
-/// Create a [EH_Frame_Hdr] object, and write out the fixed fields of `.eh_frame_hdr` to memory.
+/// Create a [`EH_Frame_Hdr`] object, and write out the fixed fields of `.eh_frame_hdr` to
+/// memory.
 ///
 /// Load address is not known at this point.
 pub fn new(
@@ -2879,7 +2879,7 @@ pub fn ELF32_R_TYPE(info: Elf32_Word) -> u8 {
 info as u8
 }
 pub fn ELF32_R_INFO(sym: Elf32_Word, ty: u8) -> Elf32_Word {
-sym << 8 | ty as Elf32_Word
+(sym << 8) | ty as Elf32_Word
 }

 pub fn ELF32_ST_BIND(info: u8) -> u8 {
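The added parentheses in `ELF32_R_INFO` only make the precedence explicit; the function still packs a symbol index and a relocation type into one `Elf32_Word` (symbol in the upper 24 bits, type in the low byte). A hedged round-trip sketch using the accessor shown in the diff plus an `ELF32_R_SYM` helper that is assumed here, not taken from the diff:

```rust
#![allow(non_snake_case)]
type Elf32_Word = u32;

fn ELF32_R_SYM(info: Elf32_Word) -> Elf32_Word {
    info >> 8 // symbol index lives in the upper 24 bits
}
fn ELF32_R_TYPE(info: Elf32_Word) -> u8 {
    info as u8 // relocation type lives in the low byte
}
fn ELF32_R_INFO(sym: Elf32_Word, ty: u8) -> Elf32_Word {
    (sym << 8) | ty as Elf32_Word
}

fn main() {
    let info = ELF32_R_INFO(0x1234, 0x16);
    assert_eq!(ELF32_R_SYM(info), 0x1234);
    assert_eq!(ELF32_R_TYPE(info), 0x16);
}
```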
@@ -4,7 +4,6 @@
 clippy::cast_possible_truncation,
 clippy::cast_possible_wrap,
 clippy::cast_sign_loss,
-clippy::doc_markdown,
 clippy::enum_glob_use,
 clippy::missing_errors_doc,
 clippy::missing_panics_doc,
@@ -179,6 +178,7 @@ pub struct Linker<'a> {
 section_map: HashMap<usize, usize>,
 image: Vec<u8>,
 load_offset: u32,
+image_offset: u32,
 rela_dyn_relas: Vec<Elf32_Rela>,
 }

@@ -197,15 +197,26 @@ impl<'a> Linker<'a> {

 // Maintain alignment requirement specified in sh_addralign
 let align = shdr.sh_addralign;
-let padding = (align - (self.load_offset % align)) % align;
-self.load_offset += padding;
+let load_padding = (align - (self.load_offset % align)) % align;
+let image_padding = (align - (self.image_offset % align)) % align;

-elf_shdr.sh_addr =
-if (shdr.sh_flags as usize & SHF_ALLOC) == SHF_ALLOC { self.load_offset } else { 0 };
-elf_shdr.sh_offset = self.load_offset;
+let section_load_offset = if (shdr.sh_flags as usize & SHF_ALLOC) == SHF_ALLOC {
+self.load_offset + load_padding
+} else {
+0
+};
+let section_image_offset = self.image_offset + image_padding;
+
+elf_shdr.sh_addr = section_load_offset;
+elf_shdr.sh_offset = section_image_offset;
 self.elf_shdrs.push(SectionRecord { shdr: elf_shdr, name: sh_name_str, data });

-self.load_offset += shdr.sh_size;
+if (shdr.sh_flags as usize & SHF_ALLOC) == SHF_ALLOC {
+self.load_offset = section_load_offset + shdr.sh_size;
+}
+if shdr.sh_type as usize != SHT_NOBITS {
+self.image_offset = section_image_offset + shdr.sh_size;
+}

 self.elf_shdrs.len() - 1
 }
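The reworked `load_section` keeps two cursors instead of one: `load_offset` is the run-time address cursor and only advances for `SHF_ALLOC` sections, while the new `image_offset` is the file-offset cursor and only advances for sections that actually carry bytes (everything except `SHT_NOBITS`). Both are first padded up to `sh_addralign`. A minimal sketch of just the padding rule, assuming a non-zero `align` (the `pad_to` helper is illustrative, not part of the diff):

```rust
/// Distance from `offset` to the next multiple of `align` (0 if already aligned).
fn pad_to(offset: u32, align: u32) -> u32 {
    (align - (offset % align)) % align
}

fn main() {
    // A 4-byte-aligned section placed after 10 bytes starts at offset 12.
    let (offset, align) = (10u32, 4u32);
    let padding = pad_to(offset, align);
    assert_eq!(padding, 2);
    assert_eq!(offset + padding, 12);

    // Already aligned: no padding is inserted.
    assert_eq!(pad_to(16, 4), 0);
}
```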
@@ -265,7 +276,7 @@ impl<'a> Linker<'a> {
 };

 let classify = |reloc: &R, sym_option: Option<&Elf32_Sym>| -> Option<RelocInfo<R>> {
-let defined_val = sym_option.map_or(true, |sym| {
+let defined_val = sym_option.is_none_or(|sym| {
 sym.st_shndx != SHN_UNDEF || ELF32_ST_BIND(sym.st_info) == STB_LOCAL
 });
 match self.isa {
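`Option::is_none_or`, stable since Rust 1.82, is the direct replacement for `map_or(true, ...)`: it returns `true` for `None` and otherwise applies the predicate, which is exactly what the "symbol is defined or locally bound" check wants. A quick equivalence check:

```rust
fn main() {
    let absent: Option<u32> = None;
    let present: Option<u32> = Some(3);

    // The two spellings agree for both None and Some.
    assert_eq!(absent.is_none_or(|v| v > 1), absent.map_or(true, |v| v > 1));
    assert_eq!(present.is_none_or(|v| v > 1), present.map_or(true, |v| v > 1));

    // None is vacuously accepted, mirroring "no symbol attached to the relocation".
    assert!(absent.is_none_or(|v| v > 100));
}
```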
@@ -755,6 +766,7 @@ impl<'a> Linker<'a> {
 section_map,
 image,
 load_offset: elf_sh_data_off as u32,
+image_offset: elf_sh_data_off as u32,
 rela_dyn_relas,
 };

@@ -1303,8 +1315,7 @@ impl<'a> Linker<'a> {
 let bss_elf_index = linker.load_section(
 shdr,
 section_name,
-data[shdr.sh_offset as usize..(shdr.sh_offset + shdr.sh_size) as usize]
-.to_vec(),
+vec![0; 0], // NOBITS section has no data
 );
 linker.section_map.insert(bss_section_index, bss_elf_index);

@@ -1402,10 +1413,12 @@ impl<'a> Linker<'a> {
 linker.implement_eh_frame_hdr()?;
 }

-// Load all section data into the image
+// Load all non-NOBITS section data into the image
 for rec in &linker.elf_shdrs[1..] {
-linker.image.extend(vec![0; (rec.shdr.sh_offset as usize) - linker.image.len()]);
-linker.image.extend(&rec.data);
+if rec.shdr.sh_type as usize != SHT_NOBITS {
+linker.image.extend(vec![0; (rec.shdr.sh_offset as usize) - linker.image.len()]);
+linker.image.extend(&rec.data);
+}
 }

 // Load all section headers to the image
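With the file offsets fixed earlier, assembling the image amounts to zero-padding up to each section's `sh_offset` and appending its data, skipping `SHT_NOBITS` sections (such as `.bss`) that occupy memory at run time but contribute no file bytes. A hedged sketch over a simplified record type (not the crate's actual `SectionRecord`):

```rust
const SHT_NOBITS: u32 = 8; // standard ELF type for .bss-like sections

struct Section {
    sh_type: u32,
    sh_offset: usize, // file offset chosen earlier, already aligned
    data: Vec<u8>,
}

fn build_image(sections: &[Section]) -> Vec<u8> {
    let mut image = Vec::new();
    for sec in sections {
        if sec.sh_type != SHT_NOBITS {
            image.resize(sec.sh_offset, 0); // zero-pad up to the section's offset
            image.extend_from_slice(&sec.data);
        }
    }
    image
}

fn main() {
    let sections = vec![
        Section { sh_type: 1, sh_offset: 0, data: vec![1, 2, 3] },
        Section { sh_type: SHT_NOBITS, sh_offset: 4, data: vec![] }, // .bss: no bytes
        Section { sh_type: 1, sh_offset: 8, data: vec![9] },
    ];
    assert_eq!(build_image(&sections), vec![1, 2, 3, 0, 0, 0, 0, 0, 9]);
}
```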
@ -5,7 +5,7 @@ description = "Parser for python code."
|
||||
authors = [ "RustPython Team", "M-Labs" ]
|
||||
build = "build.rs"
|
||||
license = "MIT"
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
|
||||
[build-dependencies]
|
||||
lalrpop = "0.22"
|
||||
|
@@ -17,12 +17,10 @@ pub fn make_config_comment(
 return Err(ParseError::User {
 error: LexicalError {
 location: com_loc,
-error: LexicalErrorType::OtherError(
-format!(
-"config comment at top must have the same indentation with what it applies (comment at {com_loc}, statement at {stmt_loc})",
-)
-)
-}
+error: LexicalErrorType::OtherError(format!(
+"config comment at top must have the same indentation with what it applies (comment at {com_loc}, statement at {stmt_loc})",
+)),
+},
 });
 };
 Ok(nac3com_above
@@ -42,14 +40,11 @@ pub fn handle_small_stmt<U>(
 return Err(ParseError::User {
 error: LexicalError {
 location: com_above_loc,
-error: LexicalErrorType::OtherError(
-format!(
-"config comment at top must have the same indentation with what it applies (comment at {}, statement at {})",
-com_above_loc,
-stmts[0].location,
-)
-)
-}
+error: LexicalErrorType::OtherError(format!(
+"config comment at top must have the same indentation with what it applies (comment at {}, statement at {})",
+com_above_loc, stmts[0].location,
+)),
+},
 });
 }
 apply_config_comments(&mut stmts[0], nac3com_above.into_iter().map(|(com, _)| com).collect());
@@ -161,14 +161,16 @@ impl<'a> FStringParser<'a> {
 return Err(EmptyExpression);
 }
 let ret = if pred_expression_text.is_empty() {
-vec![self.expr(ExprKind::FormattedValue {
-value: Box::new(
-parse_fstring_expr(&expression)
-.map_err(|e| InvalidExpression(Box::new(e.error)))?,
-),
-conversion,
-format_spec: spec,
-})]
+vec![
+self.expr(ExprKind::FormattedValue {
+value: Box::new(
+parse_fstring_expr(&expression)
+.map_err(|e| InvalidExpression(Box::new(e.error)))?,
+),
+conversion,
+format_spec: spec,
+}),
+]
 } else {
 vec![
 self.expr(ExprKind::Constant {
@@ -281,7 +281,7 @@
 return Err(LexicalError {
 error: LexicalErrorType::OtherError(format!("{e:?}")),
 location: start_pos,
-})
+});
 }
 },
 };
@@ -380,11 +380,7 @@
 fn take_number(&mut self, radix: u32) -> Option<char> {
 let take_char = Lexer::<T>::is_digit_of_radix(self.chr0, radix);

-if take_char {
-Some(self.next_char().unwrap())
-} else {
-None
-}
+if take_char { Some(self.next_char().unwrap()) } else { None }
 }

 /// Test if a digit is of a certain radix.
@@ -490,7 +486,7 @@
 return Err(LexicalError {
 error: LexicalErrorType::StringError,
 location: start_pos,
-})
+});
 }
 }
 let start_pos = self.get_pos();
@@ -503,7 +499,7 @@
 return Err(LexicalError {
 error: LexicalErrorType::StringError,
 location: self.get_pos(),
-})
+});
 }
 }
 }
@@ -1269,9 +1265,7 @@
 let token = self.inner_next();
 trace!(
 "Lex token {:?}, nesting={:?}, indent stack: {:?}",
-token,
-self.nesting,
-self.indentation_stack
+token, self.nesting, self.indentation_stack
 );

 match token {
@@ -1283,7 +1277,7 @@

 #[cfg(test)]
 mod tests {
-use super::{make_tokenizer, NewlineHandler, Tok};
+use super::{NewlineHandler, Tok, make_tokenizer};
 use nac3ast::FileName;

 const WINDOWS_EOL: &str = "\r\n";
Some files were not shown because too many files have changed in this diff.