forked from M-Labs/nac3

Compare commits: master...fix/unitte (4 commits)

Author | SHA1 | Date
---|---|---
 | 6b02ec2a07 |
 | 45ac109c03 |
 | b03a5646ee |
 | 637e7db70f |

The page's side-by-side diffs are flattened below: within each changed region the two versions of a line (or block) appear consecutively, with the base (`master`) side generally listed first and the `fix/unitte` side second.
Cargo.lock (generated): 288 changed lines

The lockfile differences are mechanical: `fix/unitte` is pinned to an older dependency set than master, so most entries move to earlier releases and their checksums change accordingly. The version changes, base (master) vs. branch (fix/unitte):

Package | master | fix/unitte
---|---|---
bitflags | 2.9.0 | 2.8.0
cc | 1.2.18 | 1.2.14
clap | 4.5.35 | 4.5.30
clap_builder | 4.5.35 | 4.5.30
clap_derive | 4.5.32 | 4.5.28
console | 0.15.11 | 0.15.10
crossbeam-channel | 0.5.15 | 0.5.14
dissimilar | 1.0.10 | 1.0.9
either | 1.15.0 | 1.13.0
errno | 0.3.11 | 0.3.10
foldhash | 0.1.5 | 0.1.4
getrandom | 0.3.2 | 0.3.1
indexmap | 2.9.0 | 2.7.1
indoc | 2.0.6 | 2.0.5
insta | 1.42.2 | 1.42.1
itoa | 1.0.15 | 1.0.14
libc | 0.2.171 | 0.2.169
linux-raw-sys | 0.9.4 | 0.4.15
log | 0.4.27 | 0.4.25
once_cell | 1.21.3 | 1.20.3
pin-project | 1.1.10 | 1.1.9
pin-project-internal | 1.1.10 | 1.1.9
portable-atomic | 1.11.0 | 1.10.0
ppv-lite86 | 0.2.21 | 0.2.20
proc-macro2 | 1.0.94 | 1.0.93
pyo3 | 0.24.1 | 0.21.2
pyo3-build-config | 0.24.1 | 0.21.2
pyo3-ffi | 0.24.1 | 0.21.2
pyo3-macros | 0.24.1 | 0.21.2
pyo3-macros-backend | 0.24.1 | 0.21.2
quote | 1.0.40 | 1.0.38
redox_syscall | 0.5.11 | 0.5.8
rustix | 1.0.5 | 0.38.44
rustversion | 1.0.20 | 1.0.19
ryu | 1.0.20 | 1.0.19
semver | 1.0.26 | 1.0.25
serde | 1.0.219 | 1.0.217
serde_derive | 1.0.219 | 1.0.217
serde_json | 1.0.140 | 1.0.138
smallvec | 1.15.0 | 1.14.0
string-interner | 0.19.0 | 0.18.0
string_cache | 0.8.9 | 0.8.8
syn | 2.0.100 | 2.0.98
target-lexicon | 0.13.2 | 0.12.16
target-triple | 0.1.4 | 0.1.3
tempfile | 3.19.1 | 3.17.1
trybuild | 1.0.104 | 1.0.103
unicode-ident | 1.0.18 | 1.0.17
unindent | 0.2.4 | 0.2.3
wasi | 0.14.2+wasi-0.2.4 | 0.13.3+wasi-0.2.2
winnow | 0.7.6 | 0.7.2
wit-bindgen-rt | 0.39.0 | 0.33.0

A few transitive entries differ between the two sides as a consequence (a second `heck` entry at 0.4.1, `r-efi 5.2.0`, and the duplicated `zerocopy`/`zerocopy-derive` entries), and dependency lists throughout switch between `syn 2.0.100` and `syn 2.0.98` to match.
flake.lock (generated): 6 changed lines

```
@@ -2,11 +2,11 @@
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1744098102,
        "narHash": "sha256-tzCdyIJj9AjysC3OuKA+tMD/kDEDAF9mICPDU7ix0JA=",
        "lastModified": 1738680400,
        "narHash": "sha256-ooLh+XW8jfa+91F1nhf9OF7qhuA/y1ChLx6lXDNeY5U=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "c8cd81426f45942bb2906d5ed2fe21d2f19d95b7",
        "rev": "799ba5bffed04ced7067a91798353d360788b30d",
        "type": "github"
      },
      "original": {
```
nac3artiq/Cargo.toml

```
@@ -2,18 +2,18 @@
name = "nac3artiq"
version = "0.1.0"
authors = ["M-Labs"]
edition = "2024"
edition = "2021"

[lib]
name = "nac3artiq"
crate-type = ["cdylib"]

[dependencies]
indexmap = "2.8"
indexmap = "2.7"
itertools = "0.14"
pyo3 = { version = "0.24", features = ["extension-module"] }
pyo3 = { version = "0.21", features = ["extension-module", "gil-refs"] }
parking_lot = "0.12"
tempfile = "3.19"
tempfile = "3.16"
nac3core = { path = "../nac3core" }
nac3ld = { path = "../nac3ld" }
```
Deleted demo (file name not captured by the page extract): 27 lines, present only on master.

```
@@ -1,27 +0,0 @@
from min_artiq import kernel, KernelInvariant, nac3
import min_artiq as artiq


@nac3
class Demo:
    core: KernelInvariant[artiq.Core]
    led0: KernelInvariant[artiq.TTLOut]
    led1: KernelInvariant[artiq.TTLOut]

    def __init__(self):
        self.core = artiq.Core()
        self.led0 = artiq.TTLOut(self.core, 18)
        self.led1 = artiq.TTLOut(self.core, 19)

    @kernel
    def run(self):
        self.core.reset()
        while True:
            with artiq.parallel:
                self.led0.pulse(100.*artiq.ms)
                self.led1.pulse(100.*artiq.ms)
            self.core.delay(100.*artiq.ms)


if __name__ == "__main__":
    Demo().run()
```
nac3artiq/demo/min_artiq.py

```
@@ -1,9 +1,9 @@
from inspect import getfullargspec
from functools import wraps
from types import SimpleNamespace
from numpy import int32, int64
from typing import Generic, TypeVar
from math import floor, ceil
from numpy import int32, int64, uint32, uint64, float64, bool_, str_, ndarray
from types import GenericAlias, ModuleType, SimpleNamespace
from typing import _GenericAlias, Generic, TypeVar

import nac3artiq

@@ -40,10 +40,10 @@ class Option(Generic[T]):

    def is_none(self):
        return self._nac3_option is None


    def is_some(self):
        return not self.is_none()


    def unwrap(self):
        if self.is_none():
            raise UnwrapNoneError()

@@ -54,7 +54,7 @@ class Option(Generic[T]):
            return "none"
        else:
            return "Some({})".format(repr(self._nac3_option))


    def __str__(self) -> str:
        if self.is_none():
            return "none"

@@ -85,46 +85,13 @@ def ceil64(x):
import device_db
core_arguments = device_db.device_db["core"]["arguments"]

builtins = {
    "int": int,
    "float": float,
    "bool": bool,
    "str": str,
    "list": list,
    "tuple": tuple,
    "Exception": Exception,

    "types": {
        "GenericAlias": GenericAlias,
        "ModuleType": ModuleType,
    },

    "typing": {
        "_GenericAlias": _GenericAlias,
        "TypeVar": TypeVar,
    },

    "numpy": {
        "int32": int32,
        "int64": int64,
        "uint32": uint32,
        "uint64": uint64,
        "float64": float64,
        "bool_": bool_,
        "str_": str_,
        "ndarray": ndarray,
    },

    "artiq": {
        "Kernel": Kernel,
        "KernelInvariant": KernelInvariant,
        "_ConstGenericMarker": _ConstGenericMarker,
        "none": none,
        "virtual": virtual,
        "Option": Option,
    },
artiq_builtins = {
    "none": none,
    "virtual": virtual,
    "_ConstGenericMarker": _ConstGenericMarker,
    "Option": Option,
}
compiler = nac3artiq.NAC3(core_arguments["target"], builtins)
compiler = nac3artiq.NAC3(core_arguments["target"], artiq_builtins)
allow_registration = True
# Delay NAC3 analysis until all referenced variables are supposed to exist on the CPython side.
registered_functions = set()

@@ -185,9 +152,9 @@ def nac3(cls):
    return cls


ms: KernelInvariant[float] = 1e-3
us: KernelInvariant[float] = 1e-6
ns: KernelInvariant[float] = 1e-9
ms = 1e-3
us = 1e-6
ns = 1e-9

@extern
def rtio_init():

@@ -368,9 +335,9 @@ class UnwrapNoneError(Exception):
    """raised when unwrapping a none value"""
    artiq_builtin = True

parallel: KernelInvariant[KernelContextManager] = KernelContextManager()
legacy_parallel: KernelInvariant[KernelContextManager] = KernelContextManager()
sequential: KernelInvariant[KernelContextManager] = KernelContextManager()
parallel = KernelContextManager()
legacy_parallel = KernelContextManager()
sequential = KernelContextManager()

special_ids = {
    "parallel": id(parallel),
```
nac3artiq/demo/rpc_kwargs_test.py (new file, 37 lines, added on fix/unitte)

```
@@ -0,0 +1,37 @@
from min_artiq import *
from numpy import int32

@rpc
def sum_3(a: int32, b: int32 = 10, c: int32 = 20) -> int32:
    """
    An RPC function to test NAC3's handling of positional/keyword arguments.
    """
    return int32(a + b + c)

@nac3
class RpcKwargTest:
    core: KernelInvariant[Core]

    def __init__(self):
        self.core = Core()

    @kernel
    def run(self):
        #1) All positional => a=1, b=2, c=3 -> total=6
        s1 = sum_3(1, 2, 3)
        assert s1 == 6

        #2) Use the default b=10, c=20 => a=5 => total=35
        s2 = sum_3(5)
        assert s2 == 35

        #3) a=1 (positional), b=100 (keyword), omit c => c=20 => total=121
        s3 = sum_3(1, b=100)
        assert s3 == 121

        #4) a=2, c=300 => b=10 (default) => total=312
        s4 = sum_3(a=2, c=300)
        assert s4 == 312

if __name__ == "__main__":
    RpcKwargTest().run()
```
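For reference, the four calls above rely on ordinary Python binding semantics. The sketch below is a host-side cross-check using `inspect.signature`; it is illustrative only and not part of the branch, and `sum_3` is redeclared without NAC3 types so it runs under plain CPython.

```python
# Host-side reference model (plain CPython, not NAC3): bind positional and
# keyword arguments against the signature, then fill in the defaults.
from inspect import signature

def sum_3(a, b=10, c=20):
    return a + b + c

def resolve(*args, **kwargs):
    bound = signature(sum_3).bind(*args, **kwargs)
    bound.apply_defaults()
    return dict(bound.arguments)

assert resolve(1, 2, 3) == {"a": 1, "b": 2, "c": 3}        # total 6
assert resolve(5) == {"a": 5, "b": 10, "c": 20}            # total 35
assert resolve(1, b=100) == {"a": 1, "b": 100, "c": 20}    # total 121
assert resolve(a=2, c=300) == {"a": 2, "b": 10, "c": 300}  # total 312
```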
nac3artiq/src/codegen.rs

```
@@ -1,5 +1,5 @@
use std::{
    collections::{HashMap, hash_map::DefaultHasher},
    collections::hash_map::DefaultHasher,
    hash::{Hash, Hasher},
    iter::once,
    mem,

@@ -8,47 +8,45 @@

use itertools::Itertools;
use pyo3::{
    PyObject, PyResult, Python,
    prelude::*,
    types::{PyDict, PyList},
    PyObject, PyResult, Python,
};

use super::{symbol_resolver::InnerResolver, timeline::TimeFns, SpecialPythonId};
use nac3core::{
    codegen::{
        CodeGenContext, CodeGenerator,
        expr::{create_fn_and_call, destructure_range, gen_call, infer_and_call_function},
        llvm_intrinsics::{call_int_smax, call_memcpy, call_stackrestore, call_stacksave},
        stmt::{gen_block, gen_for_callback_incrementing, gen_if_callback, gen_with},
        type_aligned_alloca,
        types::{RangeType, ndarray::NDArrayType},
        types::{ndarray::NDArrayType, RangeType},
        values::{
            ArrayLikeIndexer, ArrayLikeValue, ArraySliceValue, ListValue, ProxyValue,
            UntypedArrayLikeAccessor,
        },
        CodeGenContext, CodeGenerator,
    },
    inkwell::{
        AddressSpace, IntPredicate, OptimizationLevel,
        context::Context,
        module::Linkage,
        targets::TargetMachine,
        types::{BasicType, IntType},
        values::{BasicValueEnum, IntValue, PointerValue, StructValue},
        AddressSpace, IntPredicate, OptimizationLevel,
    },
    nac3parser::ast::{Expr, ExprKind, Located, Stmt, StmtKind, StrRef},
    symbol_resolver::ValueEnum,
    toplevel::{
        DefinitionId, GenCall,
        helper::{PrimDef, extract_ndims},
        helper::{extract_ndims, PrimDef},
        numpy::unpack_ndarray_var_tys,
        DefinitionId, GenCall,
    },
    typecheck::{
        type_inferencer::PrimitiveStore,
        typedef::{FunSignature, FuncArg, Type, TypeEnum, VarMap, iter_type_vars},
        typedef::{iter_type_vars, FunSignature, FuncArg, Type, TypeEnum, VarMap},
    },
};

use super::{SpecialPythonId, symbol_resolver::InnerResolver, timeline::TimeFns};

/// The parallelism mode within a block.
#[derive(Copy, Clone, Eq, PartialEq)]
enum ParallelMode {

@@ -191,7 +189,11 @@ impl CodeGenerator for ArtiqCodeGenerator<'_> {
    }

    fn get_size_type<'ctx>(&self, ctx: &'ctx Context) -> IntType<'ctx> {
        if self.size_t == 32 { ctx.i32_type() } else { ctx.i64_type() }
        if self.size_t == 32 {
            ctx.i32_type()
        } else {
            ctx.i64_type()
        }
    }

    fn gen_block<'ctx, 'a, 'c, I: Iterator<Item = &'c Stmt<Option<Type>>>>(
```
```
@@ -228,10 +230,21 @@
        obj: Option<(Type, ValueEnum<'ctx>)>,
        fun: (&FunSignature, DefinitionId),
        params: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
        kwargs: Option<HashMap<StrRef, ValueEnum<'ctx>>>, // New parameter for keyword arguments
    ) -> Result<Option<BasicValueEnum<'ctx>>, String> {
        let result = gen_call(self, ctx, obj, fun, params)?;
        let mut combined_params = params;

        // If keyword arguments are provided, map them to the function signature
        if let Some(kwargs) = kwargs {
            for arg in &fun.0.args {
                if let Some(value) = kwargs.get(&arg.name) {
                    combined_params.push((Some(arg.name), value.clone()));
                }
            }
        }

        let result = gen_call(self, ctx, obj, fun, combined_params)?;

        // Deep parallel emits timeline end-update/timeline-reset after each function call
        if self.parallel_mode == ParallelMode::Deep {
            self.timeline_update_end_max(ctx, self.end.clone(), Some("end"))?;
            self.timeline_reset_start(ctx)?;
```
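The new `kwargs` parameter above is folded into the positional parameter list by walking the function signature in order. A minimal sketch of that merge, written in Python rather than the Rust/nac3core types (the names here are placeholders, not NAC3 API):

```python
# Illustrative merge: keyword arguments are appended after the positional
# ones, in signature order, so the downstream call sees a stable ordering.
def combine_params(sig_arg_names, params, kwargs=None):
    """params: list of (name_or_None, value); kwargs: dict of name -> value."""
    combined = list(params)
    if kwargs:
        for name in sig_arg_names:      # iterate like `for arg in &fun.0.args`
            if name in kwargs:
                combined.append((name, kwargs[name]))
    return combined

print(combine_params(["a", "b", "c"], [(None, 1)], {"c": 3, "b": 2}))
# [(None, 1), ('b', 2), ('c', 3)]
```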
```
@@ -410,10 +423,14 @@
fn gen_rpc_tag(
    ctx: &mut CodeGenContext<'_, '_>,
    ty: Type,
    is_kwarg: bool,
    buffer: &mut Vec<u8>,
) -> Result<(), String> {
    use nac3core::typecheck::typedef::TypeEnum::*;

    if is_kwarg {
        buffer.push(b'k');
    }
    let PrimitiveStore { int32, int64, float, bool, str, none, .. } = ctx.primitives;

    if ctx.unifier.unioned(ty, int32) {

@@ -435,14 +452,14 @@
            buffer.push(b't');
            buffer.push(ty.len() as u8);
            for ty in ty {
                gen_rpc_tag(ctx, *ty, buffer)?;
                gen_rpc_tag(ctx, *ty, false, buffer)?;
            }
        }
        TObj { obj_id, params, .. } if *obj_id == PrimDef::List.id() => {
            let ty = iter_type_vars(params).next().unwrap().ty;

            buffer.push(b'l');
            gen_rpc_tag(ctx, ty, buffer)?;
            gen_rpc_tag(ctx, ty, false, buffer)?;
        }
        TObj { obj_id, .. } if *obj_id == PrimDef::NDArray.id() => {
            let (ndarray_dtype, ndarray_ndims) = unpack_ndarray_var_tys(&mut ctx.unifier, ty);

@@ -450,10 +467,7 @@
                &*ctx.unifier.get_ty_immutable(ndarray_ndims)
            {
                if values.len() != 1 {
                    return Err(format!(
                        "NDArray types with multiple literal bounds for ndims is not supported: {}",
                        ctx.unifier.stringify(ty)
                    ));
                    return Err(format!("NDArray types with multiple literal bounds for ndims is not supported: {}", ctx.unifier.stringify(ty)));
                }

                let value = values[0].clone();

@@ -469,7 +483,7 @@
            buffer.push(b'a');
            buffer.push((ndarray_ndims & 0xFF) as u8);
            gen_rpc_tag(ctx, ndarray_dtype, buffer)?;
            gen_rpc_tag(ctx, ndarray_dtype, false, buffer)?;
        }
        _ => return Err(format!("Unsupported type: {:?}", ctx.unifier.stringify(ty))),
    }
```
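With the added `is_kwarg` flag, an argument passed by keyword gets a `b'k'` marker pushed into the tag buffer ahead of its type tag, while positional arguments, element types, and the return type pass `false`. A rough sketch of the resulting byte layout; the single-letter type tags below are placeholders rather than the exact NAC3 tag alphabet:

```python
# Sketch of the tag stream implied by gen_rpc_tag: optional b'O' for a bound
# object, one entry per argument (b'k' prefix when it is a keyword argument),
# then b':' followed by the return type tag.
def build_tag(args, ret, has_self=False):
    """args: list of (type_letter, is_kwarg); ret: a type letter."""
    tag = bytearray()
    if has_self:
        tag += b"O"
    for letter, is_kwarg in args:
        if is_kwarg:
            tag += b"k"
        tag += letter.encode()
    tag += b":" + ret.encode()
    return bytes(tag)

print(build_tag([("i", False), ("i", True)], "i"))  # b'iki:i'
```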
```
@@ -826,6 +840,7 @@ fn rpc_codegen_callback_fn<'ctx>(
    obj: Option<(Type, ValueEnum<'ctx>)>,
    fun: (&FunSignature, DefinitionId),
    args: Vec<(Option<StrRef>, ValueEnum<'ctx>)>,
    kwargs: Option<HashMap<StrRef, ValueEnum<'ctx>>>, // New parameter for keyword arguments
    generator: &mut dyn CodeGenerator,
    is_async: bool,
) -> Result<Option<BasicValueEnum<'ctx>>, String> {

@@ -835,72 +850,24 @@ fn rpc_codegen_callback_fn<'ctx>(
    let ptr_type = int8.ptr_type(AddressSpace::default());
    let tag_ptr_type = ctx.ctx.struct_type(&[ptr_type.into(), size_type.into()], false);

    let service_id = int32.const_int(fun.1.0 as u64, false);
    // -- setup rpc tags
    let mut tag = Vec::new();
    if obj.is_some() {
        tag.push(b'O');
    }
    for arg in &fun.0.args {
        gen_rpc_tag(ctx, arg.ty, &mut tag)?;
    }
    tag.push(b':');
    gen_rpc_tag(ctx, fun.0.ret, &mut tag)?;
    let service_id = int32.const_int(fun.1 .0 as u64, false);

    let mut hasher = DefaultHasher::new();
    tag.hash(&mut hasher);
    let hash = format!("{}", hasher.finish());
    // Handle both positional and keyword arguments
    let mut mapping: HashMap<StrRef, ValueEnum<'ctx>> = HashMap::new();

    let tag_ptr = ctx
        .module
        .get_global(hash.as_str())
        .unwrap_or_else(|| {
            let tag_arr_ptr = ctx.module.add_global(
                int8.array_type(tag.len() as u32),
                None,
                format!("tagptr{}", fun.1.0).as_str(),
            );
            tag_arr_ptr.set_initializer(&int8.const_array(
                &tag.iter().map(|v| int8.const_int(u64::from(*v), false)).collect::<Vec<_>>(),
            ));
            tag_arr_ptr.set_linkage(Linkage::Private);
            let tag_ptr = ctx.module.add_global(tag_ptr_type, None, &hash);
            tag_ptr.set_linkage(Linkage::Private);
            tag_ptr.set_initializer(&ctx.ctx.const_struct(
                &[
                    tag_arr_ptr.as_pointer_value().const_cast(ptr_type).into(),
                    size_type.const_int(tag.len() as u64, false).into(),
                ],
                false,
            ));
            tag_ptr
        })
        .as_pointer_value();

    let arg_length = args.len() + usize::from(obj.is_some());

    let stackptr = call_stacksave(ctx, Some("rpc.stack"));
    let args_ptr = ctx
        .builder
        .build_array_alloca(
            ptr_type,
            ctx.ctx.i32_type().const_int(arg_length as u64, false),
            "argptr",
        )
        .unwrap();

    // -- rpc args handling
    // Add positional arguments first
    let mut keys = fun.0.args.clone();
    let mut mapping = HashMap::new();
    for (key, value) in args {
        mapping.insert(key.unwrap_or_else(|| keys.remove(0).name), value);
    }
    // default value handling
    for k in keys {
        mapping
            .insert(k.name, ctx.gen_symbol_val(generator, &k.default_value.unwrap(), k.ty).into());

    // Add keyword arguments if provided
    if let Some(kwargs) = kwargs {
        for (key, value) in kwargs {
            mapping.insert(key, value);
        }
    }
    // reorder the parameters

    let mut real_params = fun
        .0
        .args
```
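The mapping built above keys argument values by parameter name: positional values consume signature names in order, explicit keywords are inserted by name, and in the base version of this code any parameter still missing falls back to its declared default before everything is reordered to signature order. A compact Python model of that approach; it is illustrative only, and since the hunk does not show how the branch handles defaults, that step is an assumption here:

```python
# Name -> value mapping, then reorder to signature order (illustrative model).
def map_rpc_args(sig, args, kwargs=None):
    """sig: list of (name, default); args: positional values; kwargs: by name."""
    names = [name for name, _ in sig]
    mapping = {}
    for value in args:                          # positional arguments first
        mapping[names.pop(0)] = value
    for name, value in (kwargs or {}).items():  # then keyword arguments
        mapping[name] = value
    for name, default in sig:                   # assumed: fill remaining defaults
        mapping.setdefault(name, default)
    return [mapping[name] for name, _ in sig]   # reorder to signature order

print(map_rpc_args([("a", None), ("b", 10), ("c", 20)], [1], {"c": 300}))
# [1, 10, 300]
```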
```
@@ -913,15 +880,17 @@
                .map(|llvm_val| (llvm_val, arg.ty))
        })
        .collect::<Result<Vec<(_, _)>, _>>()?;

    // Existing logic for generating the RPC call remains largely unchanged
    if let Some(obj) = obj {
        if let ValueEnum::Static(obj_val) = obj.1 {
            real_params.insert(0, (obj_val.get_const_obj(ctx, generator), obj.0));
        } else {
            // should be an error here...
            panic!("only host object is allowed");
        }
    }

    // Generate the RPC call as before, but with the updated `real_params`
    for (i, (arg, arg_ty)) in real_params.iter().enumerate() {
        let arg_slot = format_rpc_arg(generator, ctx, (*arg, *arg_ty, i));
        let arg_ptr = unsafe {

@@ -935,7 +904,6 @@
        ctx.builder.build_store(arg_ptr, arg_slot).unwrap();
    }

    // call
    infer_and_call_function(
        ctx,
        if is_async { "rpc_send_async" } else { "rpc_send" },

@@ -945,22 +913,7 @@
        None,
    );

    // reclaim stack space used by arguments
    call_stackrestore(ctx, stackptr);

    if is_async {
        // async RPCs do not return any values
        Ok(None)
    } else {
        let result = format_rpc_ret(generator, ctx, fun.0.ret);

        if !result.is_some_and(|res| res.get_type().is_pointer_type()) {
            // An RPC returning an NDArray would not touch here.
            call_stackrestore(ctx, stackptr);
        }

        Ok(result)
    }
    Ok(format_rpc_ret(generator, ctx, fun.0.ret))
}

pub fn attributes_writeback<'ctx>(

@@ -971,7 +924,7 @@
    return_obj: Option<(Type, ValueEnum<'ctx>)>,
) -> Result<(), String> {
    Python::with_gil(|py| -> PyResult<Result<(), String>> {
        let host_attributes = host_attributes.downcast_bound::<PyList>(py)?;
        let host_attributes: &PyList = host_attributes.downcast(py)?;
        let top_levels = ctx.top_level.definitions.read();
        let globals = inner_resolver.global_value_ids.read();
        let int32 = ctx.ctx.i32_type();

@@ -984,7 +937,7 @@
        }

        for val in (*globals).values() {
            let val = val.bind(py);
            let val = val.as_ref(py);
            let ty = inner_resolver.get_obj_type(
                py,
                val,

@@ -1008,14 +961,14 @@
                        if !is_mutable {
                            continue;
                        }
                        if gen_rpc_tag(ctx, *field_ty, &mut scratch_buffer).is_ok() {
                        if gen_rpc_tag(ctx, *field_ty, false, &mut scratch_buffer).is_ok() {
                            attributes.push(name.to_string());
                            let (index, _) = ctx.get_attr_index(ty, *name);
                            values.push((
                                *field_ty,
                                ctx.build_gep_and_load(
                                    obj.into_pointer_value(),
                                    &[zero, int32.const_int(index.unwrap() as u64, false)],
                                    &[zero, int32.const_int(index as u64, false)],
                                    None,
                                ),
                            ));

@@ -1031,7 +984,7 @@
                TypeEnum::TObj { obj_id, params, .. } if *obj_id == PrimDef::List.id() => {
                    let elem_ty = iter_type_vars(params).next().unwrap().ty;

                    if gen_rpc_tag(ctx, elem_ty, &mut scratch_buffer).is_ok() {
                    if gen_rpc_tag(ctx, elem_ty, false, &mut scratch_buffer).is_ok() {
                        let pydict = PyDict::new(py);
                        pydict.set_item("obj", val)?;
                        host_attributes.append(pydict)?;

@@ -1049,14 +1002,14 @@
                        if *is_method {
                            continue;
                        }
                        if gen_rpc_tag(ctx, *field_ty, &mut scratch_buffer).is_ok() {
                        if gen_rpc_tag(ctx, *field_ty, false, &mut scratch_buffer).is_ok() {
                            fields.push(name.to_string());
                            let (index, _) = ctx.get_attr_index(ty, *name);
                            values.push((
                                *field_ty,
                                ctx.build_gep_and_load(
                                    obj.into_pointer_value(),
                                    &[zero, int32.const_int(index.unwrap() as u64, false)],
                                    &[zero, int32.const_int(index as u64, false)],
                                    None,
                                ),
                            ));

@@ -1168,7 +1121,7 @@
                if as_rtio { "rtio_log" } else { "core_log" },
                if as_rtio { None } else { Some(llvm_i32.into()) },
                &[llvm_pi8.into()],
                &once(fmt.into()).chain(args).collect_vec(),
                &once(fmt.into()).chain(args).map(BasicValueEnum::into).collect_vec(),
                true,
                None,
                None,
```
nac3artiq/src/debug.rs: present only on master (273 lines, shown as removed in this comparison).

```
@@ -1,273 +0,0 @@
```

The file implements `InnerResolver::debug_str(&self, tld: Option<&[TopLevelDef]>, unifier: &Option<&mut Unifier>) -> String`, a pretty-printer for the resolver's internal state. It walks and stringifies `id_to_type`, `id_to_def`, `id_to_pyval`, `id_to_primitive`, `field_to_val`, `global_value_ids`, `pyid_to_def`, `pyid_to_type`, `string_store`, `exception_ids`, `name_to_pyid`, and the module handle, using the optional `TopLevelDef` slice and `Unifier` to render definitions and types. The full 273-line listing is not reproduced in this cleaned-up view.
@ -1,6 +1,7 @@
#![deny(future_incompatible, let_underscore, nonstandard_style, clippy::all)]
#![warn(clippy::pedantic)]
#![allow(
unsafe_op_in_unsafe_fn,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::enum_glob_use,
@ -10,11 +11,9 @@
)]

use std::{
cell::LazyCell,
collections::{HashMap, HashSet},
fs,
io::Write,
path::Path,
process::Command,
rc::Rc,
sync::Arc,
@ -24,7 +23,7 @@ use indexmap::IndexMap;
use itertools::Itertools;
use parking_lot::{Mutex, RwLock};
use pyo3::{
IntoPyObjectExt, create_exception, exceptions,
create_exception, exceptions,
prelude::*,
types::{PyBytes, PyDict, PyNone, PySet},
};
@ -32,17 +31,17 @@ use tempfile::{self, TempDir};

use nac3core::{
codegen::{
CodeGenLLVMOptions, CodeGenTargetMachineOptions, CodeGenTask, CodeGenerator, WithCall,
WorkerRegistry, concrete_type::ConcreteTypeStore, gen_func_impl, irrt::load_irrt,
concrete_type::ConcreteTypeStore, gen_func_impl, irrt::load_irrt, CodeGenLLVMOptions,
CodeGenTargetMachineOptions, CodeGenTask, CodeGenerator, WithCall, WorkerRegistry,
},
inkwell::{
OptimizationLevel,
context::Context,
memory_buffer::MemoryBuffer,
module::{FlagBehavior, Linkage, Module},
passes::PassBuilderOptions,
support::is_multithreaded,
targets::*,
OptimizationLevel,
},
nac3parser::{
ast::{self, Constant, ExprKind, Located, Stmt, StmtKind, StrRef},
@ -50,31 +49,27 @@ use nac3core::{
},
symbol_resolver::SymbolResolver,
toplevel::{
DefinitionId, GenCall, TopLevelDef,
builtins::get_exn_constructor,
composer::{BuiltinFuncCreator, BuiltinFuncSpec, ComposerConfig, TopLevelComposer},
DefinitionId, GenCall, TopLevelDef,
},
typecheck::{
type_inferencer::PrimitiveStore,
typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier, VarMap, into_var_map},
typedef::{into_var_map, FunSignature, FuncArg, Type, TypeEnum, Unifier, VarMap},
},
};
use nac3ld::Linker;

use codegen::{
ArtiqCodeGenerator, attributes_writeback, gen_core_log, gen_rtio_log, rpc_codegen_callback,
attributes_writeback, gen_core_log, gen_rtio_log, rpc_codegen_callback, ArtiqCodeGenerator,
};
use symbol_resolver::{DeferredEvaluationStore, InnerResolver, PythonHelper, Resolver};
use timeline::TimeFns;

mod codegen;
mod debug;
mod symbol_resolver;
mod timeline;

const ENV_NAC3_EMIT_LLVM_BC: &str = "NAC3_EMIT_LLVM_BC";
const ENV_NAC3_EMIT_LLVM_LL: &str = "NAC3_EMIT_LLVM_LL";

#[derive(PartialEq, Clone, Copy)]
enum Isa {
Host,
@ -89,7 +84,7 @@ impl Isa {
match self {
Isa::Host => TargetMachine::get_default_triple(),
Isa::RiscV32G | Isa::RiscV32IMA => TargetTriple::create("riscv32-unknown-linux"),
Isa::CortexA9 => TargetTriple::create("armv7-unknown-linux-eabihf"),
Isa::CortexA9 => TargetTriple::create("armv7-unknown-linux-gnueabihf"),
}
}
@ -166,8 +161,6 @@ pub struct PrimitivePythonId {
virtual_id: u64,
option: u64,
module: u64,
kernel: u64,
kernel_invariant: u64,
}

#[derive(Clone, Default)]
@ -177,10 +170,7 @@ pub struct SpecialPythonId {
sequential: u64,
}

/// An [`IndexMap`] storing the `id()` of values, mapped to a handle of the value itself.
type PyValueMap = IndexMap<u64, Arc<PyObject>>;

type TopLevelComponent = (Stmt, String, Arc<PyObject>);
type TopLevelComponent = (Stmt, String, PyObject);

// TopLevelComposer is unsendable as it holds the unification table, which is
// unsendable due to Rc. Arc would cause a performance hit.
@ -207,17 +197,17 @@ create_exception!(nac3artiq, CompileError, exceptions::PyException);
impl Nac3 {
fn register_module(
&mut self,
module: &Arc<PyObject>,
module: &PyObject,
registered_class_ids: &HashSet<u64>,
) -> PyResult<()> {
let (module_name, source_file, source) =
Python::with_gil(|py| -> PyResult<(String, String, String)> {
let module = module.bind(py);
let module: &PyAny = module.extract(py)?;
let source_file = module.getattr("__file__");
let (source_file, source) = if let Ok(source_file) = source_file {
let source_file = source_file.extract::<&str>()?;
let source_file = source_file.extract()?;
(
source_file.to_string(),
source_file,
fs::read_to_string(source_file).map_err(|e| {
exceptions::PyIOError::new_err(format!(
"failed to read input file: {e}"
@ -227,26 +217,18 @@ impl Nac3 {
} else {
// kernels submitted by content have no file
// but still can provide source by StringLoader
let get_src_fn = module.getattr("__loader__")?.getattr("get_source")?;
(String::from("<expcontent>"), get_src_fn.call1((PyNone::get(py),))?.extract()?)
let get_src_fn = module
.getattr("__loader__")?
.extract::<PyObject>()?
.getattr(py, "get_source")?;
("<expcontent>", get_src_fn.call1(py, (PyNone::get(py),))?.extract(py)?)
};
Ok((module.getattr("__name__")?.extract()?, source_file, source))
Ok((module.getattr("__name__")?.extract()?, source_file.to_string(), source))
})?;

let parser_result = parse_program(&source, source_file.into())
.map_err(|e| exceptions::PySyntaxError::new_err(format!("parse error: {e}")))?;

let id_fn = LazyCell::new(|| {
Python::with_gil(|py| {
PyModule::import(py, "builtins").unwrap().getattr("id").unwrap().unbind()
})
});
let get_type_hints_fn = LazyCell::new(|| {
Python::with_gil(|py| {
PyModule::import(py, "typing").unwrap().getattr("get_type_hints").unwrap().unbind()
})
});

for mut stmt in parser_result {
let include = match stmt.node {
StmtKind::ClassDef { ref decorator_list, ref mut body, ref mut bases, .. } => {
@ -263,15 +245,15 @@ impl Nac3 {
// Drop unregistered (i.e. host-only) base classes.
bases.retain(|base| {
Python::with_gil(|py| -> PyResult<bool> {
let id_fn = PyModule::import(py, "builtins")?.getattr("id")?;
match &base.node {
ExprKind::Name { id, .. } => {
if *id == "Exception".into() {
Ok(true)
} else {
let base_obj =
module.bind(py).getattr(id.to_string().as_str())?;
let base_id =
id_fn.bind(py).call1((base_obj,))?.extract()?;
module.getattr(py, id.to_string().as_str())?;
let base_id = id_fn.call1((base_obj,))?.extract()?;
Ok(registered_class_ids.contains(&base_id))
}
}
@ -304,28 +286,10 @@ impl Nac3 {
}
})
}

// Allow global variable declaration with `Kernel` or `KernelInvariant` type annotation
StmtKind::AnnAssign { ref target, .. } => match &target.node {
ExprKind::Name { id, .. } => Python::with_gil(|py| {
let py_type_hints =
get_type_hints_fn.bind(py).call1((module.bind(py),)).unwrap();
let py_type_hints = py_type_hints.downcast::<PyDict>().unwrap();
let var_type_hint =
py_type_hints.get_item(id.to_string().as_str()).unwrap().unwrap();
let var_type = var_type_hint.getattr_opt("__origin__").unwrap();
if let Some(var_type) = var_type {
let var_type_id = id_fn.bind(py).call1((var_type,)).unwrap();
let var_type_id = var_type_id.extract::<u64>().unwrap();

[self.primitive_ids.kernel, self.primitive_ids.kernel_invariant]
.contains(&var_type_id)
} else {
false
}
}),
_ => false,
},
// Allow global variable declaration with `Kernel` type annotation
StmtKind::AnnAssign { ref annotation, .. } => {
matches!(&annotation.node, ExprKind::Subscript { value, .. } if matches!(&value.node, ExprKind::Name {id, ..} if id == &"Kernel".into()))
}
_ => false,
};
@ -358,7 +322,7 @@ impl Nac3 {
None => {
return Some(format!(
"object launching kernel does not have method `{method_name}`"
));
))
}
}
} else {
@ -379,7 +343,7 @@ impl Nac3 {
None if default_value.is_none() => {
return Some(format!(
"argument `{name}` not provided when launching kernel function"
));
))
}
_ => break,
};
@ -393,7 +357,7 @@ impl Nac3 {
Err(e) => {
return Some(format!(
"type error ({e}) at parameter #{i} when calling kernel function"
));
))
}
};
if let Err(e) = unifier.unify(in_ty, *ty) {
@ -467,13 +431,13 @@ impl Nac3 {
]
}

fn compile_method<'py, T>(
fn compile_method<T>(
&self,
obj: &Bound<'py, PyAny>,
obj: &PyAny,
method_name: &str,
args: Vec<Bound<'py, PyAny>>,
embedding_map: &Bound<'py, PyAny>,
py: Python<'py>,
args: Vec<&PyAny>,
embedding_map: &PyAny,
py: Python,
link_fn: &dyn Fn(&Module) -> PyResult<T>,
) -> PyResult<T> {
let size_t = self.isa.get_size_type(&Context::create());
@ -489,20 +453,19 @@ impl Nac3 {
let id_fn = builtins.getattr("id")?;
let issubclass = builtins.getattr("issubclass")?;
let exn_class = builtins.getattr("Exception")?;
let store_obj = embedding_map.getattr("store_object").unwrap();
let store_str = embedding_map.getattr("store_str").unwrap();
let store_fun = embedding_map.getattr("store_function").unwrap().into_py_any(py)?;
let host_attributes =
embedding_map.getattr("attributes_writeback").unwrap().into_py_any(py)?;
let store_obj = embedding_map.getattr("store_object").unwrap().to_object(py);
let store_str = embedding_map.getattr("store_str").unwrap().to_object(py);
let store_fun = embedding_map.getattr("store_function").unwrap().to_object(py);
let host_attributes = embedding_map.getattr("attributes_writeback").unwrap().to_object(py);
let global_value_ids: Arc<RwLock<HashMap<_, _>>> = Arc::new(RwLock::new(HashMap::new()));
let helper = PythonHelper {
id_fn: Arc::new(builtins.getattr("id").unwrap().into_py_any(py)?),
len_fn: Arc::new(builtins.getattr("len").unwrap().into_py_any(py)?),
type_fn: Arc::new(builtins.getattr("type").unwrap().into_py_any(py)?),
origin_ty_fn: Arc::new(typings.getattr("get_origin").unwrap().into_py_any(py)?),
args_ty_fn: Arc::new(typings.getattr("get_args").unwrap().into_py_any(py)?),
store_obj: Arc::new(store_obj.clone().into_py_any(py)?),
store_str: Arc::new(store_str.into_py_any(py)?),
id_fn: builtins.getattr("id").unwrap().to_object(py),
len_fn: builtins.getattr("len").unwrap().to_object(py),
type_fn: builtins.getattr("type").unwrap().to_object(py),
origin_ty_fn: typings.getattr("get_origin").unwrap().to_object(py),
args_ty_fn: typings.getattr("get_args").unwrap().to_object(py),
store_obj: store_obj.clone(),
store_str,
};

let pyid_to_type = Arc::new(RwLock::new(HashMap::<u64, Type>::new()));
@ -525,14 +488,14 @@ impl Nac3 {
|
||||
|
||||
let mut rpc_ids = vec![];
|
||||
for (stmt, path, module) in &self.top_levels {
|
||||
let py_module = module.bind(py);
|
||||
let py_module: &PyAny = module.extract(py)?;
|
||||
let module_id: u64 = id_fn.call1((py_module,))?.extract()?;
|
||||
let module_name: String = py_module.getattr("__name__")?.extract()?;
|
||||
let helper = helper.clone();
|
||||
let class_obj;
|
||||
if let StmtKind::ClassDef { name, .. } = &stmt.node {
|
||||
let class = py_module.getattr(name.to_string().as_str()).unwrap();
|
||||
if issubclass.call1((&class, &exn_class)).unwrap().extract().unwrap()
|
||||
if issubclass.call1((class, exn_class)).unwrap().extract().unwrap()
|
||||
&& class.getattr("artiq_builtin").is_err()
|
||||
{
|
||||
class_obj = Some(class);
|
||||
@ -545,8 +508,8 @@ impl Nac3 {
|
||||
let (name_to_pyid, resolver, _, _) =
|
||||
module_to_resolver_cache.get(&module_id).cloned().unwrap_or_else(|| {
|
||||
let mut name_to_pyid: HashMap<StrRef, u64> = HashMap::new();
|
||||
let members = py_module.getattr("__dict__").unwrap();
|
||||
let members = members.downcast::<PyDict>().unwrap();
|
||||
let members: &PyDict =
|
||||
py_module.getattr("__dict__").unwrap().downcast().unwrap();
|
||||
for (key, val) in members {
|
||||
let key: &str = key.extract().unwrap();
|
||||
let val = id_fn.call1((val,)).unwrap().extract().unwrap();
|
||||
@ -592,79 +555,52 @@ impl Nac3 {
|
||||
if let Some(class_obj) = class_obj {
|
||||
self.exception_ids
|
||||
.write()
|
||||
.insert(def_id.0, store_obj.call1((class_obj,))?.extract()?);
|
||||
.insert(def_id.0, store_obj.call1(py, (class_obj,))?.extract(py)?);
|
||||
}
|
||||
|
||||
match &stmt.node {
|
||||
StmtKind::FunctionDef { decorator_list, .. } => {
|
||||
for decorator in decorator_list {
|
||||
if let Some(decorator_str) = decorator_id_string(decorator) {
|
||||
if decorator_str == "rpc" {
|
||||
store_fun
|
||||
.call1(
|
||||
py,
|
||||
(
|
||||
def_id.0.into_py_any(py)?,
|
||||
module
|
||||
.bind(py)
|
||||
.getattr(name.to_string().as_str())
|
||||
.unwrap(),
|
||||
),
|
||||
)
|
||||
.unwrap();
|
||||
if decorator_list
|
||||
.iter()
|
||||
.any(|decorator| decorator_id_string(decorator) == Some("rpc".to_string()))
|
||||
{
|
||||
store_fun
|
||||
.call1(
|
||||
py,
|
||||
(
|
||||
def_id.0.into_py(py),
|
||||
module.getattr(py, name.to_string().as_str()).unwrap(),
|
||||
),
|
||||
)
|
||||
.unwrap();
|
||||
let is_async = decorator_list.iter().any(|decorator| {
|
||||
decorator_get_flags(decorator)
|
||||
.iter()
|
||||
.any(|constant| *constant == Constant::Str("async".into()))
|
||||
});
|
||||
rpc_ids.push((None, def_id, is_async));
|
||||
}
|
||||
}
|
||||
StmtKind::ClassDef { name, body, .. } => {
|
||||
let class_name = name.to_string();
|
||||
let class_obj = module.getattr(py, class_name.as_str()).unwrap();
|
||||
for stmt in body {
|
||||
if let StmtKind::FunctionDef { name, decorator_list, .. } = &stmt.node {
|
||||
if decorator_list.iter().any(|decorator| {
|
||||
decorator_id_string(decorator) == Some("rpc".to_string())
|
||||
}) {
|
||||
let is_async = decorator_list.iter().any(|decorator| {
|
||||
decorator_get_flags(decorator)
|
||||
.iter()
|
||||
.any(|constant| *constant == Constant::Str("async".into()))
|
||||
});
|
||||
rpc_ids.push((None, def_id, is_async));
|
||||
} else if decorator_str != "kernel"
|
||||
&& decorator_str != "portable"
|
||||
&& decorator_str != "extern"
|
||||
{
|
||||
return Err(CompileError::new_err(format!(
|
||||
"compilation failed\n----------\nDecorator {} is not supported (at {})",
|
||||
decorator_id_string(decorator).unwrap(),
|
||||
stmt.location
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
StmtKind::ClassDef { name, body, .. } => {
|
||||
let class_name = name.to_string();
|
||||
let class_obj = module.bind(py).getattr(class_name.as_str()).unwrap();
|
||||
for stmt in body {
|
||||
if let StmtKind::FunctionDef { name, decorator_list, .. } = &stmt.node {
|
||||
for decorator in decorator_list {
|
||||
if let Some(decorator_str) = decorator_id_string(decorator) {
|
||||
if decorator_str == "rpc" {
|
||||
let is_async = decorator_list.iter().any(|decorator| {
|
||||
decorator_get_flags(decorator).iter().any(|constant| {
|
||||
*constant == Constant::Str("async".into())
|
||||
})
|
||||
});
|
||||
if name == &"__init__".into() {
|
||||
return Err(CompileError::new_err(format!(
|
||||
"compilation failed\n----------\nThe constructor of class {} should not be decorated with rpc decorator (at {})",
|
||||
class_name, stmt.location
|
||||
)));
|
||||
}
|
||||
rpc_ids.push((
|
||||
Some((class_obj.clone(), *name)),
|
||||
def_id,
|
||||
is_async,
|
||||
));
|
||||
} else if decorator_str != "kernel"
|
||||
&& decorator_str != "portable"
|
||||
{
|
||||
return Err(CompileError::new_err(format!(
|
||||
"compilation failed\n----------\nDecorator {} is not supported (at {})",
|
||||
decorator_id_string(decorator).unwrap(),
|
||||
stmt.location
|
||||
)));
|
||||
}
|
||||
if name == &"__init__".into() {
|
||||
return Err(CompileError::new_err(format!(
|
||||
"compilation failed\n----------\nThe constructor of class {} should not be decorated with rpc decorator (at {})",
|
||||
class_name, stmt.location
|
||||
)));
|
||||
}
|
||||
rpc_ids.push((Some((class_obj.clone(), *name)), def_id, is_async));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -708,7 +644,7 @@ impl Nac3 {
|
||||
let mut arg_names = vec![];
|
||||
for (i, arg) in args.into_iter().enumerate() {
|
||||
let name = format!("tmp{i}");
|
||||
module.add(&*name, &arg)?;
|
||||
module.add(&name, arg)?;
|
||||
name_to_pyid.insert(name.clone().into(), id_fun.call1((arg,))?.extract()?);
|
||||
arg_names.push(name);
|
||||
}
|
||||
@ -730,7 +666,7 @@ impl Nac3 {
|
||||
id_to_primitive: RwLock::default(),
|
||||
field_to_val: RwLock::default(),
|
||||
name_to_pyid,
|
||||
module: Arc::new(module.into_py_any(py)?),
|
||||
module: module.to_object(py),
|
||||
helper: helper.clone(),
|
||||
string_store: self.string_store.clone(),
|
||||
exception_ids: self.exception_ids.clone(),
|
||||
@ -804,8 +740,10 @@ impl Nac3 {
|
||||
.call1(
|
||||
py,
|
||||
(
|
||||
id.0.into_py_any(py)?,
|
||||
class_def.getattr(name.to_string().as_str()).unwrap(),
|
||||
id.0.into_py(py),
|
||||
class_def
|
||||
.getattr(py, name.to_string().as_str())
|
||||
.unwrap(),
|
||||
),
|
||||
)
|
||||
.unwrap();
|
||||
@ -815,7 +753,7 @@ impl Nac3 {
|
||||
TopLevelDef::Variable { .. } => {
|
||||
return Err(CompileError::new_err(String::from(
|
||||
"Unsupported @rpc annotation on global variable",
|
||||
)));
|
||||
)))
|
||||
}
|
||||
TopLevelDef::Module { .. } => {
|
||||
unreachable!("Type module cannot be decorated with @rpc")
|
||||
@ -939,18 +877,6 @@ impl Nac3 {
|
||||
|
||||
embedding_map.setattr("expects_return", has_return).unwrap();
|
||||
|
||||
let emit_llvm_bc = std::env::var(ENV_NAC3_EMIT_LLVM_BC).is_ok();
|
||||
let emit_llvm_ll = std::env::var(ENV_NAC3_EMIT_LLVM_LL).is_ok();
|
||||
|
||||
let emit_llvm = |module: &Module<'_>, filename: &str| {
|
||||
if emit_llvm_bc {
|
||||
module.write_bitcode_to_path(Path::new(format!("{filename}.bc").as_str()));
|
||||
}
|
||||
if emit_llvm_ll {
|
||||
module.print_to_file(Path::new(format!("{filename}.ll").as_str())).unwrap();
|
||||
}
|
||||
};
|
||||
|
||||
// Link all modules into `main`.
|
||||
let buffers = membuffers.lock();
|
||||
let main = context
|
||||
@ -959,8 +885,6 @@ impl Nac3 {
|
||||
"main",
|
||||
))
|
||||
.unwrap();
|
||||
emit_llvm(&main, "main");
|
||||
|
||||
for buffer in buffers.iter().rev().skip(1) {
|
||||
let other = context
|
||||
.create_module_from_ir(MemoryBuffer::create_from_memory_range(buffer, "main"))
|
||||
@ -968,10 +892,7 @@ impl Nac3 {
|
||||
|
||||
main.link_in_module(other).map_err(|err| CompileError::new_err(err.to_string()))?;
|
||||
}
|
||||
emit_llvm(&main, "main.merged");
|
||||
|
||||
main.link_in_module(irrt).map_err(|err| CompileError::new_err(err.to_string()))?;
|
||||
emit_llvm(&main, "main.fat");
|
||||
|
||||
let mut function_iter = main.get_first_function();
|
||||
while let Some(func) = function_iter {
|
||||
@ -991,8 +912,6 @@ impl Nac3 {
|
||||
global_option = global.get_next_global();
|
||||
}
|
||||
|
||||
emit_llvm(&main, "main.pre-opt");
|
||||
|
||||
let target_machine = self
|
||||
.llvm_options
|
||||
.target
|
||||
@ -1007,15 +926,12 @@ impl Nac3 {
|
||||
panic!("Failed to run optimization for module `main`: {}", err.to_string());
|
||||
}
|
||||
|
||||
emit_llvm(&main, "main.post-opt");
|
||||
|
||||
Python::with_gil(|py| {
|
||||
let string_store = self.string_store.read();
|
||||
let mut string_store_vec = string_store.iter().collect::<Vec<_>>();
|
||||
string_store_vec.sort_by(|(_s1, key1), (_s2, key2)| key1.cmp(key2));
|
||||
for (s, key) in string_store_vec {
|
||||
let embed_key: i32 =
|
||||
helper.store_str.bind(py).call1((s,)).unwrap().extract().unwrap();
|
||||
let embed_key: i32 = helper.store_str.call1(py, (s,)).unwrap().extract(py).unwrap();
|
||||
assert_eq!(
|
||||
embed_key, *key,
|
||||
"string {s} is out of sync between embedding map (key={embed_key}) and \
|
||||
@ -1125,7 +1041,7 @@ fn add_exceptions(
|
||||
#[pymethods]
|
||||
impl Nac3 {
|
||||
#[new]
|
||||
fn new<'py>(isa: &str, artiq_builtins: &Bound<'py, PyDict>, py: Python<'py>) -> PyResult<Self> {
|
||||
fn new(isa: &str, artiq_builtins: &PyDict, py: Python) -> PyResult<Self> {
|
||||
let isa = match isa {
|
||||
"host" => Isa::Host,
|
||||
"rv32g" => Isa::RiscV32G,
|
||||
@ -1192,59 +1108,42 @@ impl Nac3 {
|
||||
|
||||
let builtins_mod = PyModule::import(py, "builtins").unwrap();
|
||||
let id_fn = builtins_mod.getattr("id").unwrap();
|
||||
let numpy_mod = PyModule::import(py, "numpy").unwrap();
|
||||
let typing_mod = PyModule::import(py, "typing").unwrap();
|
||||
let types_mod = PyModule::import(py, "types").unwrap();
|
||||
|
||||
let get_id = |x: &Bound<PyAny>| id_fn.call1((x,)).and_then(|id| id.extract()).unwrap();
|
||||
let get_artiq_builtin = |mod_name: Option<&str>, name: &str| -> Bound<PyAny> {
|
||||
if let Some(mod_name) = mod_name {
|
||||
artiq_builtins
|
||||
.get_item(mod_name)
|
||||
.unwrap()
|
||||
.unwrap_or_else(|| {
|
||||
panic!("no module key '{mod_name}' present in artiq_builtins")
|
||||
})
|
||||
.downcast::<PyDict>()
|
||||
.unwrap()
|
||||
.get_item(name)
|
||||
.unwrap()
|
||||
.unwrap_or_else(|| {
|
||||
panic!("no key '{name}' present in artiq_builtins.{mod_name}")
|
||||
})
|
||||
} else {
|
||||
artiq_builtins
|
||||
.get_item(name)
|
||||
.unwrap()
|
||||
.unwrap_or_else(|| panic!("no key '{name}' present in artiq_builtins"))
|
||||
}
|
||||
let get_id = |x: &PyAny| id_fn.call1((x,)).and_then(PyAny::extract).unwrap();
|
||||
let get_attr_id = |obj: &PyModule, attr| {
|
||||
id_fn.call1((obj.getattr(attr).unwrap(),)).unwrap().extract().unwrap()
|
||||
};
|
||||
|
||||
let primitive_ids = PrimitivePythonId {
|
||||
virtual_id: get_id(&get_artiq_builtin(Some("artiq"), "virtual")),
|
||||
virtual_id: get_id(artiq_builtins.get_item("virtual").ok().flatten().unwrap()),
|
||||
generic_alias: (
|
||||
get_id(&get_artiq_builtin(Some("typing"), "_GenericAlias")),
|
||||
get_id(&get_artiq_builtin(Some("types"), "GenericAlias")),
|
||||
get_attr_id(typing_mod, "_GenericAlias"),
|
||||
get_attr_id(types_mod, "GenericAlias"),
|
||||
),
|
||||
none: get_id(&get_artiq_builtin(Some("artiq"), "none")),
|
||||
typevar: get_id(&get_artiq_builtin(Some("typing"), "TypeVar")),
|
||||
const_generic_marker: get_id(&get_artiq_builtin(Some("artiq"), "_ConstGenericMarker")),
|
||||
int: get_id(&get_artiq_builtin(None, "int")),
|
||||
int32: get_id(&get_artiq_builtin(Some("numpy"), "int32")),
|
||||
int64: get_id(&get_artiq_builtin(Some("numpy"), "int64")),
|
||||
uint32: get_id(&get_artiq_builtin(Some("numpy"), "uint32")),
|
||||
uint64: get_id(&get_artiq_builtin(Some("numpy"), "uint64")),
|
||||
bool: get_id(&get_artiq_builtin(None, "bool")),
|
||||
np_bool_: get_id(&get_artiq_builtin(Some("numpy"), "bool_")),
|
||||
string: get_id(&get_artiq_builtin(None, "str")),
|
||||
np_str_: get_id(&get_artiq_builtin(Some("numpy"), "str_")),
|
||||
float: get_id(&get_artiq_builtin(None, "float")),
|
||||
float64: get_id(&get_artiq_builtin(Some("numpy"), "float64")),
|
||||
list: get_id(&get_artiq_builtin(None, "list")),
|
||||
ndarray: get_id(&get_artiq_builtin(Some("numpy"), "ndarray")),
|
||||
tuple: get_id(&get_artiq_builtin(None, "tuple")),
|
||||
exception: get_id(&get_artiq_builtin(None, "Exception")),
|
||||
option: get_id(&get_artiq_builtin(Some("artiq"), "Option")),
|
||||
module: get_id(&get_artiq_builtin(Some("types"), "ModuleType")),
|
||||
kernel: get_id(&get_artiq_builtin(Some("artiq"), "Kernel")),
|
||||
kernel_invariant: get_id(&get_artiq_builtin(Some("artiq"), "KernelInvariant")),
|
||||
none: get_id(artiq_builtins.get_item("none").ok().flatten().unwrap()),
|
||||
typevar: get_attr_id(typing_mod, "TypeVar"),
|
||||
const_generic_marker: get_id(
|
||||
artiq_builtins.get_item("_ConstGenericMarker").ok().flatten().unwrap(),
|
||||
),
|
||||
int: get_attr_id(builtins_mod, "int"),
|
||||
int32: get_attr_id(numpy_mod, "int32"),
|
||||
int64: get_attr_id(numpy_mod, "int64"),
|
||||
uint32: get_attr_id(numpy_mod, "uint32"),
|
||||
uint64: get_attr_id(numpy_mod, "uint64"),
|
||||
bool: get_attr_id(builtins_mod, "bool"),
|
||||
np_bool_: get_attr_id(numpy_mod, "bool_"),
|
||||
string: get_attr_id(builtins_mod, "str"),
|
||||
np_str_: get_attr_id(numpy_mod, "str_"),
|
||||
float: get_attr_id(builtins_mod, "float"),
|
||||
float64: get_attr_id(numpy_mod, "float64"),
|
||||
list: get_attr_id(builtins_mod, "list"),
|
||||
ndarray: get_attr_id(numpy_mod, "ndarray"),
|
||||
tuple: get_attr_id(builtins_mod, "tuple"),
|
||||
exception: get_attr_id(builtins_mod, "Exception"),
|
||||
option: get_id(artiq_builtins.get_item("Option").ok().flatten().unwrap()),
|
||||
module: get_attr_id(types_mod, "ModuleType"),
|
||||
};
|
||||
|
||||
let working_directory = tempfile::Builder::new().prefix("nac3-").tempdir().unwrap();
|
||||
@ -1312,45 +1211,37 @@ impl Nac3 {
|
||||
})
|
||||
}
|
||||
|
||||
fn analyze<'py>(
|
||||
fn analyze(
|
||||
&mut self,
|
||||
functions: &Bound<'py, PySet>,
|
||||
classes: &Bound<'py, PySet>,
|
||||
special_ids: &Bound<'py, PyDict>,
|
||||
content_modules: &Bound<'py, PySet>,
|
||||
functions: &PySet,
|
||||
classes: &PySet,
|
||||
special_ids: &PyDict,
|
||||
content_modules: &PySet,
|
||||
) -> PyResult<()> {
|
||||
let (modules, class_ids) =
|
||||
Python::with_gil(|py| -> PyResult<(PyValueMap, HashSet<u64>)> {
|
||||
let mut modules: IndexMap<u64, Arc<PyObject>> = IndexMap::new();
|
||||
Python::with_gil(|py| -> PyResult<(IndexMap<u64, PyObject>, HashSet<u64>)> {
|
||||
let mut modules: IndexMap<u64, PyObject> = IndexMap::new();
|
||||
let mut class_ids: HashSet<u64> = HashSet::new();
|
||||
|
||||
let id_fn = PyModule::import(py, "builtins")?.getattr("id")?;
|
||||
let getmodule_fn = PyModule::import(py, "inspect")?.getattr("getmodule")?;
|
||||
|
||||
for function in functions {
|
||||
let module = getmodule_fn.call1((&function,))?;
|
||||
if !module.is_none() {
|
||||
modules.insert(
|
||||
id_fn.call1((&module,))?.extract()?,
|
||||
Arc::new(module.into_py_any(py)?),
|
||||
);
|
||||
let module: PyObject = getmodule_fn.call1((function,))?.extract()?;
|
||||
if !module.is_none(py) {
|
||||
modules.insert(id_fn.call1((&module,))?.extract()?, module);
|
||||
}
|
||||
}
|
||||
for class in classes {
|
||||
let module = getmodule_fn.call1((&class,))?;
|
||||
if !module.is_none() {
|
||||
modules.insert(
|
||||
id_fn.call1((&module,))?.extract()?,
|
||||
Arc::new(module.into_py_any(py)?),
|
||||
);
|
||||
let module: PyObject = getmodule_fn.call1((class,))?.extract()?;
|
||||
if !module.is_none(py) {
|
||||
modules.insert(id_fn.call1((&module,))?.extract()?, module);
|
||||
}
|
||||
class_ids.insert(id_fn.call1((&class,))?.extract()?);
|
||||
class_ids.insert(id_fn.call1((class,))?.extract()?);
|
||||
}
|
||||
for module in content_modules {
|
||||
modules.insert(
|
||||
id_fn.call1((&module,))?.extract()?,
|
||||
Arc::new(module.into_py_any(py)?),
|
||||
);
|
||||
let module: PyObject = module.extract()?;
|
||||
modules.insert(id_fn.call1((&module,))?.extract()?, module);
|
||||
}
|
||||
Ok((modules, class_ids))
|
||||
})?;
|
||||
@ -1380,18 +1271,19 @@ impl Nac3 {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn compile_method_to_file<'py>(
|
||||
fn compile_method_to_file(
|
||||
&mut self,
|
||||
obj: &Bound<'py, PyAny>,
|
||||
obj: &PyAny,
|
||||
method_name: &str,
|
||||
args: Vec<Bound<'py, PyAny>>,
|
||||
args: Vec<&PyAny>,
|
||||
filename: &str,
|
||||
embedding_map: &Bound<'py, PyAny>,
|
||||
py: Python<'py>,
|
||||
embedding_map: &PyAny,
|
||||
py: Python,
|
||||
) -> PyResult<()> {
|
||||
let target_machine = self.get_llvm_target_machine();
|
||||
let link_fn = |module: &Module| {
|
||||
if self.isa == Isa::Host {
|
||||
|
||||
if self.isa == Isa::Host {
|
||||
let link_fn = |module: &Module| {
|
||||
let working_directory = self.working_directory.path().to_owned();
|
||||
target_machine
|
||||
.write_to_file(module, FileType::Object, &working_directory.join("module.o"))
|
||||
@ -1401,7 +1293,11 @@ impl Nac3 {
|
||||
working_directory.join("module.o").to_string_lossy().to_string(),
|
||||
)?;
|
||||
Ok(())
|
||||
} else {
|
||||
};
|
||||
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
} else {
|
||||
let link_fn = |module: &Module| {
|
||||
let object_mem = target_machine
|
||||
.write_to_memory_buffer(module, FileType::Object)
|
||||
.expect("couldn't write module to object file buffer");
|
||||
@ -1415,23 +1311,24 @@ impl Nac3 {
|
||||
} else {
|
||||
Err(CompileError::new_err("linker failed to process object file"))
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
}
|
||||
}
|
||||
|
||||
fn compile_method_to_mem<'py>(
|
||||
fn compile_method_to_mem(
|
||||
&mut self,
|
||||
obj: &Bound<'py, PyAny>,
|
||||
obj: &PyAny,
|
||||
method_name: &str,
|
||||
args: Vec<Bound<'py, PyAny>>,
|
||||
embedding_map: &Bound<'py, PyAny>,
|
||||
py: Python<'py>,
|
||||
args: Vec<&PyAny>,
|
||||
embedding_map: &PyAny,
|
||||
py: Python,
|
||||
) -> PyResult<PyObject> {
|
||||
let target_machine = self.get_llvm_target_machine();
|
||||
let link_fn = |module: &Module| {
|
||||
if self.isa == Isa::Host {
|
||||
|
||||
if self.isa == Isa::Host {
|
||||
let link_fn = |module: &Module| {
|
||||
let working_directory = self.working_directory.path().to_owned();
|
||||
target_machine
|
||||
.write_to_file(module, FileType::Object, &working_directory.join("module.o"))
|
||||
@ -1445,7 +1342,11 @@ impl Nac3 {
|
||||
)?;
|
||||
|
||||
Ok(PyBytes::new(py, &fs::read(filename).unwrap()).into())
|
||||
} else {
|
||||
};
|
||||
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
} else {
|
||||
let link_fn = |module: &Module| {
|
||||
let object_mem = target_machine
|
||||
.write_to_memory_buffer(module, FileType::Object)
|
||||
.expect("couldn't write module to object file buffer");
|
||||
@ -1454,20 +1355,20 @@ impl Nac3 {
|
||||
} else {
|
||||
Err(CompileError::new_err("linker failed to process object file"))
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "init-llvm-profile")]
|
||||
unsafe extern "C" {
|
||||
extern "C" {
|
||||
fn __llvm_profile_initialize();
|
||||
}
|
||||
|
||||
#[pymodule]
|
||||
fn nac3artiq<'py>(py: Python<'py>, m: &Bound<'py, PyModule>) -> PyResult<()> {
|
||||
fn nac3artiq(py: Python, m: &PyModule) -> PyResult<()> {
|
||||
#[cfg(feature = "init-llvm-profile")]
|
||||
unsafe {
|
||||
__llvm_profile_initialize();
|
||||
|
@ -1,48 +1,44 @@
use std::{
collections::{HashMap, HashSet},
fmt::Debug,
sync::{
Arc,
atomic::{AtomicBool, Ordering::Relaxed},
Arc,
},
};

use itertools::Itertools;
use parking_lot::RwLock;
use pyo3::{
IntoPyObjectExt, PyAny, PyErr, PyObject, PyResult, Python,
prelude::*,
types::{PyDict, PyTuple},
PyAny, PyErr, PyObject, PyResult, Python,
};

use super::PrimitivePythonId;
use nac3core::{
codegen::{
CodeGenContext, CodeGenerator,
types::{ProxyType, ndarray::NDArrayType, structure::StructProxyType},
types::{ndarray::NDArrayType, structure::StructProxyType, ProxyType},
values::ndarray::make_contiguous_strides,
CodeGenContext, CodeGenerator,
},
inkwell::{
AddressSpace,
module::Linkage,
types::{BasicType, BasicTypeEnum},
values::{BasicValue, BasicValueEnum},
AddressSpace,
},
nac3parser::ast::{self, StrRef},
symbol_resolver::{StaticValue, SymbolResolver, SymbolValue, ValueEnum},
toplevel::{
DefinitionId, TopLevelDef,
helper::PrimDef,
numpy::{make_ndarray_ty, unpack_ndarray_var_tys},
DefinitionId, TopLevelDef,
},
typecheck::{
type_inferencer::PrimitiveStore,
typedef::{Type, TypeEnum, TypeVar, Unifier, VarMap, into_var_map, iter_type_vars},
typedef::{into_var_map, iter_type_vars, Type, TypeEnum, TypeVar, Unifier, VarMap},
},
};

use super::PrimitivePythonId;

#[derive(Debug)]
pub enum PrimitiveValue {
I32(i32),
I64(i64),
@ -75,17 +71,16 @@ impl DeferredEvaluationStore {
|
||||
|
||||
/// A class field as stored in the [`InnerResolver`], represented by the ID and name of the
|
||||
/// associated [`PythonValue`].
|
||||
pub(crate) type ResolverField = (u64, StrRef);
|
||||
|
||||
/// A value as stored in Python, represented by the `id()` and [`PyObject`] of the value.
|
||||
pub(crate) type PyValueHandle = (u64, Arc<PyObject>);
|
||||
type ResolverField = (u64, StrRef);
|
||||
/// A class field as stored in Python, represented by the `id()` and [`PyObject`] of the field.
|
||||
type PyFieldHandle = (u64, PyObject);
|
||||
|
||||
pub struct InnerResolver {
|
||||
pub id_to_type: RwLock<HashMap<StrRef, Type>>,
|
||||
pub id_to_def: RwLock<HashMap<StrRef, DefinitionId>>,
|
||||
pub id_to_pyval: RwLock<HashMap<StrRef, PyValueHandle>>,
|
||||
pub id_to_pyval: RwLock<HashMap<StrRef, (u64, PyObject)>>,
|
||||
pub id_to_primitive: RwLock<HashMap<u64, PrimitiveValue>>,
|
||||
pub field_to_val: RwLock<HashMap<ResolverField, Option<PyValueHandle>>>,
|
||||
pub field_to_val: RwLock<HashMap<ResolverField, Option<PyFieldHandle>>>,
|
||||
pub global_value_ids: Arc<RwLock<HashMap<u64, PyObject>>>,
|
||||
pub pyid_to_def: Arc<RwLock<HashMap<u64, DefinitionId>>>,
|
||||
pub pyid_to_type: Arc<RwLock<HashMap<u64, Type>>>,
|
||||
@ -96,33 +91,26 @@ pub struct InnerResolver {
|
||||
pub deferred_eval_store: DeferredEvaluationStore,
|
||||
// module specific
|
||||
pub name_to_pyid: HashMap<StrRef, u64>,
|
||||
pub module: Arc<PyObject>,
|
||||
pub module: PyObject,
|
||||
}
|
||||
|
||||
impl Debug for InnerResolver {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.debug_str(None, &None))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Resolver(pub Arc<InnerResolver>);
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PythonHelper {
|
||||
pub type_fn: Arc<PyObject>,
|
||||
pub len_fn: Arc<PyObject>,
|
||||
pub id_fn: Arc<PyObject>,
|
||||
pub origin_ty_fn: Arc<PyObject>,
|
||||
pub args_ty_fn: Arc<PyObject>,
|
||||
pub store_obj: Arc<PyObject>,
|
||||
pub store_str: Arc<PyObject>,
|
||||
pub type_fn: PyObject,
|
||||
pub len_fn: PyObject,
|
||||
pub id_fn: PyObject,
|
||||
pub origin_ty_fn: PyObject,
|
||||
pub args_ty_fn: PyObject,
|
||||
pub store_obj: PyObject,
|
||||
pub store_str: PyObject,
|
||||
}
|
||||
|
||||
struct PythonValue {
|
||||
id: u64,
|
||||
value: Arc<PyObject>,
|
||||
store_obj: Arc<PyObject>,
|
||||
value: PyObject,
|
||||
store_obj: PyObject,
|
||||
resolver: Arc<InnerResolver>,
|
||||
}
|
||||
|
||||
@ -139,7 +127,7 @@ impl StaticValue for PythonValue {
|
||||
ctx.module.get_global(format!("{}_const", self.id).as_str()).map_or_else(
|
||||
|| {
|
||||
Python::with_gil(|py| -> PyResult<BasicValueEnum<'ctx>> {
|
||||
let id: u32 = self.store_obj.bind(py).call1((&*self.value,))?.extract()?;
|
||||
let id: u32 = self.store_obj.call1(py, (self.value.clone(),))?.extract(py)?;
|
||||
let struct_type = ctx.ctx.struct_type(&[ctx.ctx.i32_type().into()], false);
|
||||
let global = ctx.module.add_global(
|
||||
struct_type,
|
||||
@ -188,7 +176,7 @@ impl StaticValue for PythonValue {
|
||||
|
||||
Python::with_gil(|py| -> PyResult<BasicValueEnum<'ctx>> {
|
||||
self.resolver
|
||||
.get_obj_value(py, (*self.value).bind(py), ctx, generator, expected_ty)
|
||||
.get_obj_value(py, self.value.as_ref(py), ctx, generator, expected_ty)
|
||||
.map(Option::unwrap)
|
||||
})
|
||||
.map_err(|e| e.to_string())
|
||||
@ -204,94 +192,63 @@ impl StaticValue for PythonValue {
|
||||
field_to_val.get(&(self.id, name)).cloned()
|
||||
}
|
||||
.unwrap_or_else(|| {
|
||||
Python::with_gil(|py| -> PyResult<Option<PyValueHandle>> {
|
||||
Python::with_gil(|py| -> PyResult<Option<(u64, PyObject)>> {
|
||||
let helper = &self.resolver.helper;
|
||||
let id = helper.id_fn.bind(py).call1((&*self.value,))?.extract::<u64>()?;
|
||||
let ty = helper.type_fn.bind(py).call1((&*self.value,))?;
|
||||
let ty_id: u64 = helper.id_fn.bind(py).call1((ty,))?.extract()?;
|
||||
|
||||
let ty = helper.type_fn.call1(py, (&self.value,))?;
|
||||
let ty_id: u64 = helper.id_fn.call1(py, (ty,))?.extract(py)?;
|
||||
// for optimizing unwrap KernelInvariant
|
||||
if ty_id == self.resolver.primitive_ids.option && name == "_nac3_option".into() {
|
||||
let obj = self.value.bind(py).getattr(name.to_string().as_str())?;
|
||||
let id = self.resolver.helper.id_fn.bind(py).call1((&obj,))?.extract()?;
|
||||
let obj = Arc::new(obj.into_py_any(py)?);
|
||||
let obj = self.value.getattr(py, name.to_string().as_str())?;
|
||||
let id = self.resolver.helper.id_fn.call1(py, (&obj,))?.extract(py)?;
|
||||
return if self.id == self.resolver.primitive_ids.none {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some((id, obj)))
|
||||
};
|
||||
}
|
||||
|
||||
let result = if let Some(def_id) =
|
||||
self.resolver.pyid_to_def.read().get(&ty_id).copied()
|
||||
{
|
||||
let mut mutable = true;
|
||||
let defs = ctx.top_level.definitions.read();
|
||||
if let TopLevelDef::Class { fields, .. } = &*defs[def_id.0].read() {
|
||||
for (field_name, _, is_mutable) in fields {
|
||||
if field_name == &name {
|
||||
mutable = *is_mutable;
|
||||
break;
|
||||
}
|
||||
let def_id = { *self.resolver.pyid_to_def.read().get(&ty_id).unwrap() };
|
||||
let mut mutable = true;
|
||||
let defs = ctx.top_level.definitions.read();
|
||||
if let TopLevelDef::Class { fields, .. } = &*defs[def_id.0].read() {
|
||||
for (field_name, _, is_mutable) in fields {
|
||||
if field_name == &name {
|
||||
mutable = *is_mutable;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if mutable {
|
||||
None
|
||||
} else {
|
||||
let obj = self.value.bind(py).getattr(name.to_string().as_str())?;
|
||||
let id = self.resolver.helper.id_fn.bind(py).call1((&obj,))?.extract()?;
|
||||
let obj = Arc::new(obj.into_py_any(py)?);
|
||||
Some((id, obj))
|
||||
}
|
||||
} else if let Some(def_id) = self.resolver.pyid_to_def.read().get(&id).copied() {
|
||||
// Check if self.value is a module
|
||||
let in_mod_ctx = ctx
|
||||
.top_level
|
||||
.definitions
|
||||
.read()
|
||||
.get(def_id.0)
|
||||
.is_some_and(|def| matches!(&*def.read(), TopLevelDef::Module { .. }));
|
||||
|
||||
if in_mod_ctx {
|
||||
let obj = self.value.bind(py).getattr(name.to_string().as_str())?;
|
||||
let id = self.resolver.helper.id_fn.bind(py).call1((&obj,))?.extract()?;
|
||||
let obj = Arc::new(obj.into_py_any(py)?);
|
||||
Some((id, obj))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
}
|
||||
let result = if mutable {
|
||||
None
|
||||
} else {
|
||||
let obj = self.value.getattr(py, name.to_string().as_str())?;
|
||||
let id = self.resolver.helper.id_fn.call1(py, (&obj,))?.extract(py)?;
|
||||
Some((id, obj))
|
||||
};
|
||||
|
||||
self.resolver.field_to_val.write().insert((self.id, name), result.clone());
|
||||
Ok(result)
|
||||
})
|
||||
.unwrap()
|
||||
})
|
||||
.map(|(id, obj)| {
|
||||
Python::with_gil(|_| {
|
||||
ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: obj,
|
||||
store_obj: self.store_obj.clone(),
|
||||
resolver: self.resolver.clone(),
|
||||
}))
|
||||
})
|
||||
ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: obj,
|
||||
store_obj: self.store_obj.clone(),
|
||||
resolver: self.resolver.clone(),
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
fn get_tuple_element<'ctx>(&self, index: u32) -> Option<ValueEnum<'ctx>> {
|
||||
Python::with_gil(|py| -> PyResult<Option<PyValueHandle>> {
|
||||
Python::with_gil(|py| -> PyResult<Option<(u64, PyObject)>> {
|
||||
let helper = &self.resolver.helper;
|
||||
let ty = helper.type_fn.bind(py).call1((&*self.value,))?;
|
||||
let ty_id: u64 = helper.id_fn.bind(py).call1((ty,))?.extract()?;
|
||||
let ty = helper.type_fn.call1(py, (&self.value,))?;
|
||||
let ty_id: u64 = helper.id_fn.call1(py, (ty,))?.extract(py)?;
|
||||
assert_eq!(ty_id, self.resolver.primitive_ids.tuple);
|
||||
let tup = self.value.bind(py).downcast::<PyTuple>()?;
|
||||
let elem = Arc::new(tup.get_item(index as usize)?.into_py_any(py)?);
|
||||
let id = self.resolver.helper.id_fn.bind(py).call1((&*elem,))?.extract()?;
|
||||
Ok(Some((id, elem)))
|
||||
let tup: &PyTuple = self.value.extract(py)?;
|
||||
let elem = tup.get_item(index as usize)?;
|
||||
let id = self.resolver.helper.id_fn.call1(py, (elem,))?.extract(py)?;
|
||||
Ok(Some((id, elem.into())))
|
||||
})
|
||||
.unwrap()
|
||||
.map(|(id, obj)| {
|
||||
@ -306,23 +263,23 @@ impl StaticValue for PythonValue {
|
||||
}
|
||||
|
||||
impl InnerResolver {
|
||||
fn get_list_elem_type<'py>(
|
||||
fn get_list_elem_type(
|
||||
&self,
|
||||
py: Python<'py>,
|
||||
list: &Bound<'py, PyAny>,
|
||||
py: Python,
|
||||
list: &PyAny,
|
||||
len: usize,
|
||||
unifier: &mut Unifier,
|
||||
defs: &[Arc<RwLock<TopLevelDef>>],
|
||||
primitives: &PrimitiveStore,
|
||||
) -> PyResult<Result<Type, String>> {
|
||||
let mut ty = match self.get_obj_type(py, &list.get_item(0)?, unifier, defs, primitives)? {
|
||||
let mut ty = match self.get_obj_type(py, list.get_item(0)?, unifier, defs, primitives)? {
|
||||
Ok(t) => t,
|
||||
Err(e) => return Ok(Err(format!("type error ({e}) at element #0 of the list"))),
|
||||
};
|
||||
for i in 1..len {
|
||||
let b = match list
|
||||
.get_item(i)
|
||||
.map(|elem| self.get_obj_type(py, &elem, unifier, defs, primitives))??
|
||||
.map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))??
|
||||
{
|
||||
Ok(t) => t,
|
||||
Err(e) => return Ok(Err(format!("type error ({e}) at element #{i} of the list"))),
|
||||
@ -333,7 +290,7 @@ impl InnerResolver {
|
||||
return Ok(Err(format!(
|
||||
"inhomogeneous type ({}) at element #{i} of the list",
|
||||
e.to_display(unifier)
|
||||
)));
|
||||
)))
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -346,21 +303,17 @@ impl InnerResolver {
|
||||
/// `TypeVars` and `GenericAlias`(`A[int, bool]`) should use `ty_ty_id` to check.
|
||||
///
|
||||
/// The `bool` value returned indicates whether they are instantiated or not
|
||||
fn get_pyty_obj_type<'py>(
|
||||
fn get_pyty_obj_type(
|
||||
&self,
|
||||
py: Python<'py>,
|
||||
pyty: &Bound<'py, PyAny>,
|
||||
py: Python,
|
||||
pyty: &PyAny,
|
||||
unifier: &mut Unifier,
|
||||
defs: &[Arc<RwLock<TopLevelDef>>],
|
||||
primitives: &PrimitiveStore,
|
||||
) -> PyResult<Result<(Type, bool), String>> {
|
||||
let ty_id: u64 = self.helper.id_fn.bind(py).call1((pyty,))?.extract()?;
|
||||
let ty_ty_id: u64 = self
|
||||
.helper
|
||||
.id_fn
|
||||
.bind(py)
|
||||
.call1((self.helper.type_fn.bind(py).call1((pyty,))?,))?
|
||||
.extract()?;
|
||||
let ty_id: u64 = self.helper.id_fn.call1(py, (pyty,))?.extract(py)?;
|
||||
let ty_ty_id: u64 =
|
||||
self.helper.id_fn.call1(py, (self.helper.type_fn.call1(py, (pyty,))?,))?.extract(py)?;
|
||||
|
||||
if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
|
||||
Ok(Ok((primitives.int32, true)))
|
||||
@ -441,8 +394,7 @@ impl InnerResolver {
|
||||
(unifier.add_ty(ty), false)
|
||||
}))
|
||||
} else if ty_ty_id == self.primitive_ids.typevar {
|
||||
let name = pyty.getattr("__name__").unwrap();
|
||||
let name = name.extract::<&str>().unwrap();
|
||||
let name: &str = pyty.getattr("__name__").unwrap().extract().unwrap();
|
||||
let (constraint_types, is_const_generic) = {
|
||||
let constraints = pyty.getattr("__constraints__").unwrap();
|
||||
let mut result: Vec<Type> = vec![];
|
||||
@ -450,9 +402,8 @@ impl InnerResolver {
|
||||
|
||||
let mut is_const_generic = false;
|
||||
for i in 0usize.. {
|
||||
if let Ok(constr) = &constraints.get_item(i) {
|
||||
let constr_id: u64 =
|
||||
self.helper.id_fn.bind(py).call1((constr,))?.extract()?;
|
||||
if let Ok(constr) = constraints.get_item(i) {
|
||||
let constr_id: u64 = self.helper.id_fn.call1(py, (constr,))?.extract(py)?;
|
||||
if constr_id == self.primitive_ids.const_generic_marker {
|
||||
is_const_generic = true;
|
||||
continue;
|
||||
@ -511,23 +462,24 @@ impl InnerResolver {
|
||||
} else if ty_ty_id == self.primitive_ids.generic_alias.0
|
||||
|| ty_ty_id == self.primitive_ids.generic_alias.1
|
||||
{
|
||||
let origin = self.helper.origin_ty_fn.bind(py).call1((pyty,))?;
|
||||
let args = self.helper.args_ty_fn.bind(py).call1((pyty,))?;
|
||||
let args = args.downcast::<PyTuple>()?;
|
||||
let origin_ty = match self.get_pyty_obj_type(py, &origin, unifier, defs, primitives)? {
|
||||
Ok((ty, false)) => ty,
|
||||
Ok((_, true)) => {
|
||||
return Ok(Err("instantiated type does not take type parameters".into()));
|
||||
}
|
||||
Err(err) => return Ok(Err(err)),
|
||||
};
|
||||
let origin = self.helper.origin_ty_fn.call1(py, (pyty,))?;
|
||||
let args = self.helper.args_ty_fn.call1(py, (pyty,))?;
|
||||
let args: &PyTuple = args.downcast(py)?;
|
||||
let origin_ty =
|
||||
match self.get_pyty_obj_type(py, origin.as_ref(py), unifier, defs, primitives)? {
|
||||
Ok((ty, false)) => ty,
|
||||
Ok((_, true)) => {
|
||||
return Ok(Err("instantiated type does not take type parameters".into()))
|
||||
}
|
||||
Err(err) => return Ok(Err(err)),
|
||||
};
|
||||
|
||||
match &*unifier.get_ty(origin_ty) {
|
||||
TypeEnum::TObj { obj_id, .. } if *obj_id == PrimDef::List.id() => {
|
||||
if args.len() == 1 {
|
||||
let ty = match self.get_pyty_obj_type(
|
||||
py,
|
||||
&args.get_item(0)?,
|
||||
args.get_item(0)?,
|
||||
unifier,
|
||||
defs,
|
||||
primitives,
|
||||
@ -572,10 +524,10 @@ impl InnerResolver {
|
||||
|
||||
// npt.NDArray[T] == np.ndarray[Any, np.dtype[T]]
|
||||
let ndarray_dtype_pyty =
|
||||
self.helper.args_ty_fn.bind(py).call1((args.get_item(1)?,))?;
|
||||
let dtype = ndarray_dtype_pyty.downcast::<PyTuple>()?.get_item(0)?;
|
||||
self.helper.args_ty_fn.call1(py, (args.get_item(1)?,))?;
|
||||
let dtype = ndarray_dtype_pyty.downcast::<PyTuple>(py)?.get_item(0)?;
|
||||
|
||||
let ty = match self.get_pyty_obj_type(py, &dtype, unifier, defs, primitives)? {
|
||||
let ty = match self.get_pyty_obj_type(py, dtype, unifier, defs, primitives)? {
|
||||
Ok(ty) => ty,
|
||||
Err(err) => return Ok(Err(err)),
|
||||
};
|
||||
@ -591,7 +543,7 @@ impl InnerResolver {
|
||||
TypeEnum::TTuple { .. } => {
|
||||
let args = match args
|
||||
.iter()
|
||||
.map(|x| self.get_pyty_obj_type(py, &x, unifier, defs, primitives))
|
||||
.map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<_>, _>>() {
|
||||
@ -624,7 +576,7 @@ impl InnerResolver {
|
||||
}
|
||||
let args = match args
|
||||
.iter()
|
||||
.map(|x| self.get_pyty_obj_type(py, &x, unifier, defs, primitives))
|
||||
.map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<_>, _>>() {
|
||||
@ -651,7 +603,7 @@ impl InnerResolver {
|
||||
if args.len() == 1 {
|
||||
let ty = match self.get_pyty_obj_type(
|
||||
py,
|
||||
&args.get_item(0)?,
|
||||
args.get_item(0)?,
|
||||
unifier,
|
||||
defs,
|
||||
primitives,
|
||||
@ -682,22 +634,23 @@ impl InnerResolver {
|
||||
false,
|
||||
)))
|
||||
} else {
|
||||
let str_fn = PyModule::import(py, "builtins").unwrap().getattr("repr").unwrap();
|
||||
let str_fn =
|
||||
pyo3::types::PyModule::import(py, "builtins").unwrap().getattr("repr").unwrap();
|
||||
let str_repr: String = str_fn.call1((pyty,)).unwrap().extract().unwrap();
|
||||
Ok(Err(format!("{str_repr} is not registered with NAC3 (@nac3 decorator missing?)")))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_obj_type<'py>(
|
||||
pub fn get_obj_type(
|
||||
&self,
|
||||
py: Python<'py>,
|
||||
obj: &Bound<'py, PyAny>,
|
||||
py: Python,
|
||||
obj: &PyAny,
|
||||
unifier: &mut Unifier,
|
||||
defs: &[Arc<RwLock<TopLevelDef>>],
|
||||
primitives: &PrimitiveStore,
|
||||
) -> PyResult<Result<Type, String>> {
|
||||
let ty = self.helper.type_fn.bind(py).call1((obj,)).unwrap();
|
||||
let py_obj_id: u64 = self.helper.id_fn.bind(py).call1((obj,))?.extract()?;
|
||||
let ty = self.helper.type_fn.call1(py, (obj,)).unwrap();
|
||||
let py_obj_id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
if let Some(ty) = self.pyid_to_type.read().get(&py_obj_id) {
|
||||
return Ok(Ok(*ty));
|
||||
}
|
||||
@ -722,7 +675,8 @@ impl InnerResolver {
|
||||
});
|
||||
|
||||
// check if obj is module
|
||||
if self.helper.id_fn.bind(py).call1((&ty,))?.extract::<u64>()? == self.primitive_ids.module
|
||||
if self.helper.id_fn.call1(py, (ty.clone(),))?.extract::<u64>(py)?
|
||||
== self.primitive_ids.module
|
||||
&& self.pyid_to_def.read().contains_key(&py_obj_id)
|
||||
{
|
||||
let def_id = self.pyid_to_def.read()[&py_obj_id];
|
||||
@ -737,7 +691,7 @@ impl InnerResolver {
|
||||
for (name, _) in attributes {
|
||||
let attribute_obj = obj.getattr(name.to_string().as_str())?;
|
||||
let attribute_ty =
|
||||
self.get_obj_type(py, &attribute_obj, unifier, defs, primitives)?;
|
||||
self.get_obj_type(py, attribute_obj, unifier, defs, primitives)?;
|
||||
if let Ok(attribute_ty) = attribute_ty {
|
||||
module_attributes.insert(*name, (attribute_ty, false));
|
||||
} else {
|
||||
@ -747,7 +701,7 @@ impl InnerResolver {
|
||||
|
||||
for name in methods.keys() {
|
||||
let method_obj = obj.getattr(name.to_string().as_str())?;
|
||||
let method_ty = self.get_obj_type(py, &method_obj, unifier, defs, primitives)?;
|
||||
let method_ty = self.get_obj_type(py, method_obj, unifier, defs, primitives)?;
|
||||
if let Ok(method_ty) = method_ty {
|
||||
module_attributes.insert(*name, (method_ty, true));
|
||||
} else {
|
||||
@ -775,11 +729,11 @@ impl InnerResolver {
|
||||
self.primitive_ids.generic_alias.0,
|
||||
self.primitive_ids.generic_alias.1,
|
||||
]
|
||||
.contains(&self.helper.id_fn.bind(py).call1((&ty,))?.extract::<u64>()?)
|
||||
.contains(&self.helper.id_fn.call1(py, (ty.clone(),))?.extract::<u64>(py)?)
|
||||
{
|
||||
obj
|
||||
} else {
|
||||
&ty
|
||||
ty.as_ref(py)
|
||||
}
|
||||
},
|
||||
unifier,
|
||||
@ -815,7 +769,7 @@ impl InnerResolver {
|
||||
// do the instantiation for these four types
|
||||
(TypeEnum::TObj { obj_id, params, .. }, false) if *obj_id == PrimDef::List.id() => {
|
||||
let ty = iter_type_vars(params).nth(0).unwrap().ty;
|
||||
let len: usize = self.helper.len_fn.bind(py).call1((obj,))?.extract()?;
|
||||
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
|
||||
if len == 0 {
|
||||
assert!(matches!(
|
||||
&*unifier.get_ty(ty),
|
||||
@ -867,7 +821,7 @@ impl InnerResolver {
|
||||
Ok(Ok(extracted_ty))
|
||||
} else {
|
||||
let dtype = obj.getattr("dtype")?.getattr("type")?;
|
||||
let dtype_ty = self.get_pyty_obj_type(py, &dtype, unifier, defs, primitives)?;
|
||||
let dtype_ty = self.get_pyty_obj_type(py, dtype, unifier, defs, primitives)?;
|
||||
match dtype_ty {
|
||||
Ok((t, _)) => match unifier.unify(ty, t) {
|
||||
Ok(()) => {
|
||||
@ -886,10 +840,10 @@ impl InnerResolver {
|
||||
}
|
||||
}
|
||||
(TypeEnum::TTuple { .. }, false) => {
|
||||
let elements = obj.downcast::<PyTuple>()?;
|
||||
let elements: &PyTuple = obj.downcast()?;
|
||||
let types: Result<Result<Vec<_>, _>, _> = elements
|
||||
.iter()
|
||||
.map(|elem| self.get_obj_type(py, &elem, unifier, defs, primitives))
|
||||
.map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))
|
||||
.collect();
|
||||
let types = types?;
|
||||
Ok(types.map(|types| {
|
||||
@ -901,11 +855,11 @@ impl InnerResolver {
|
||||
(TypeEnum::TObj { obj_id, params, .. }, false)
|
||||
if *obj_id == primitives.option.obj_id(unifier).unwrap() =>
|
||||
{
|
||||
let Ok(field_data) = &obj.getattr("_nac3_option") else {
|
||||
let Ok(field_data) = obj.getattr("_nac3_option") else {
|
||||
unreachable!("cannot be None")
|
||||
};
|
||||
// if is `none`
|
||||
let zelf_id: u64 = self.helper.id_fn.bind(py).call1((obj,))?.extract()?;
|
||||
let zelf_id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
if zelf_id == self.primitive_ids.none {
|
||||
let ty_enum = unifier.get_ty_immutable(primitives.option);
|
||||
let TypeEnum::TObj { params, .. } = ty_enum.as_ref() else {
|
||||
@ -930,7 +884,7 @@ impl InnerResolver {
|
||||
Err(e) => {
|
||||
return Ok(Err(format!(
|
||||
"error when getting type of the option object ({e})"
|
||||
)));
|
||||
)))
|
||||
}
|
||||
};
|
||||
let new_var_map: VarMap = params.iter().map(|(id, _)| (*id, ty)).collect();
|
||||
@ -953,10 +907,10 @@ impl InnerResolver {
|
||||
// loop through non-function fields of the class to get the instantiated value
|
||||
for field in fields {
|
||||
let name: String = (*field.0).into();
|
||||
if let TypeEnum::TFunc(..) = &*unifier.get_ty(field.1.0) {
|
||||
if let TypeEnum::TFunc(..) = &*unifier.get_ty(field.1 .0) {
|
||||
continue;
|
||||
}
|
||||
let field_data = &match obj.getattr(name.as_str()) {
|
||||
let field_data = match obj.getattr(name.as_str()) {
|
||||
Ok(d) => d,
|
||||
Err(e) => return Ok(Err(format!("{e}"))),
|
||||
};
|
||||
@ -966,10 +920,10 @@ impl InnerResolver {
|
||||
Err(e) => {
|
||||
return Ok(Err(format!(
|
||||
"error when getting type of field `{name}` ({e})"
|
||||
)));
|
||||
)))
|
||||
}
|
||||
};
|
||||
let field_ty = unifier.subst(field.1.0, &var_map).unwrap_or(field.1.0);
|
||||
let field_ty = unifier.subst(field.1 .0, &var_map).unwrap_or(field.1 .0);
|
||||
if let Err(e) = unifier.unify(ty, field_ty) {
|
||||
// field type mismatch
|
||||
return Ok(Err(format!(
|
||||
@ -1000,22 +954,22 @@ impl InnerResolver {
|
||||
// check integer bounds
|
||||
if unifier.unioned(extracted_ty, primitives.int32) {
|
||||
obj.extract::<i32>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of int32"))),
|
||||
|_| Ok(Err(format!("{obj} is not in the range of int32"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.int64) {
|
||||
obj.extract::<i64>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of int64"))),
|
||||
|_| Ok(Err(format!("{obj} is not in the range of int64"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.uint32) {
|
||||
obj.extract::<u32>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of uint32"))),
|
||||
|_| Ok(Err(format!("{obj} is not in the range of uint32"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.uint64) {
|
||||
obj.extract::<u64>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of uint64"))),
|
||||
|_| Ok(Err(format!("{obj} is not in the range of uint64"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.bool) {
|
||||
@ -1024,11 +978,11 @@ impl InnerResolver {
|
||||
{
|
||||
Ok(Ok(extracted_ty))
|
||||
} else {
|
||||
Ok(Err(format!("{obj:?} is not in the range of bool")))
|
||||
Ok(Err(format!("{obj} is not in the range of bool")))
|
||||
}
|
||||
} else if unifier.unioned(extracted_ty, primitives.float) {
|
||||
obj.extract::<f64>().map_or_else(
|
||||
|_| Ok(Err(format!("{obj:?} is not in the range of float64"))),
|
||||
|_| Ok(Err(format!("{obj} is not in the range of float64"))),
|
||||
|_| Ok(Ok(extracted_ty)),
|
||||
)
|
||||
} else {
|
||||
@ -1038,21 +992,17 @@ impl InnerResolver {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_obj_value<'ctx, 'py>(
|
||||
pub fn get_obj_value<'ctx>(
|
||||
&self,
|
||||
py: Python<'py>,
|
||||
obj: &Bound<'py, PyAny>,
|
||||
py: Python,
|
||||
obj: &PyAny,
|
||||
ctx: &mut CodeGenContext<'ctx, '_>,
|
||||
generator: &mut dyn CodeGenerator,
|
||||
expected_ty: Type,
|
||||
) -> PyResult<Option<BasicValueEnum<'ctx>>> {
|
||||
let ty_id: u64 = self
|
||||
.helper
|
||||
.id_fn
|
||||
.bind(py)
|
||||
.call1((self.helper.type_fn.bind(py).call1((obj,))?,))?
|
||||
.extract()?;
|
||||
let id: u64 = self.helper.id_fn.bind(py).call1((obj,))?.extract()?;
|
||||
let ty_id: u64 =
|
||||
self.helper.id_fn.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?.extract(py)?;
|
||||
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
|
||||
let val: i32 = obj.extract().unwrap();
|
||||
self.id_to_primitive.write().insert(id, PrimitiveValue::I32(val));
|
||||
@ -1092,7 +1042,7 @@ impl InnerResolver {
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
|
||||
let len: usize = self.helper.len_fn.bind(py).call1((obj,))?.extract()?;
|
||||
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let elem_ty = match ctx.unifier.get_ty_immutable(expected_ty).as_ref() {
|
||||
TypeEnum::TObj { obj_id, params, .. } if *obj_id == PrimDef::List.id() => {
|
||||
iter_type_vars(params).nth(0).unwrap().ty
|
||||
@ -1119,13 +1069,13 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids.write().insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
}
|
||||
|
||||
let arr: Result<Option<Vec<_>>, _> = (0..len)
|
||||
.map(|i| {
|
||||
obj.get_item(i).and_then(|elem| {
|
||||
self.get_obj_value(py, &elem, ctx, generator, elem_ty).map_err(|e| {
|
||||
self.get_obj_value(py, elem, ctx, generator, elem_ty).map_err(|e| {
|
||||
super::CompileError::new_err(format!("Error getting element {i}: {e}"))
|
||||
})
|
||||
})
|
||||
@ -1203,14 +1153,13 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids.write().insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
}
|
||||
|
||||
let ndims = llvm_ndarray.ndims();
|
||||
|
||||
// Obtain the shape of the ndarray
|
||||
let shape_tuple = obj.getattr("shape")?;
|
||||
let shape_tuple = shape_tuple.downcast::<PyTuple>()?;
|
||||
let shape_tuple: &PyTuple = obj.getattr("shape")?.downcast()?;
|
||||
assert_eq!(shape_tuple.len(), ndims as usize);
|
||||
|
||||
// The Rust type inferencer cannot figure this out
|
||||
@ -1219,7 +1168,7 @@ impl InnerResolver {
|
||||
.enumerate()
|
||||
.map(|(i, elem)| {
|
||||
let value = self
|
||||
.get_obj_value(py, &elem, ctx, generator, ctx.primitives.usize())
|
||||
.get_obj_value(py, elem, ctx, generator, ctx.primitives.usize())
|
||||
.map_err(|e| {
|
||||
super::CompileError::new_err(format!("Error getting element {i}: {e}"))
|
||||
})?
|
||||
@ -1256,7 +1205,7 @@ impl InnerResolver {
|
||||
.map(|i| {
|
||||
obj.getattr("flat")?.get_item(i).and_then(|elem| {
|
||||
let value = self
|
||||
.get_obj_value(py, &elem, ctx, generator, ndarray_dtype)
|
||||
.get_obj_value(py, elem, ctx, generator, ndarray_dtype)
|
||||
.map_err(|e| {
|
||||
super::CompileError::new_err(format!(
|
||||
"Error getting element {i}: {e}"
|
||||
@ -1389,14 +1338,14 @@ impl InnerResolver {
|
||||
};
|
||||
|
||||
let tup_tys = ty.iter();
|
||||
let elements = obj.downcast::<PyTuple>()?;
|
||||
let elements: &PyTuple = obj.downcast()?;
|
||||
assert_eq!(elements.len(), tup_tys.len());
|
||||
let val: Result<Option<Vec<_>>, _> = elements
|
||||
.iter()
|
||||
.enumerate()
|
||||
.zip(tup_tys)
|
||||
.map(|((i, elem), ty)| {
|
||||
self.get_obj_value(py, &elem, ctx, generator, *ty).map_err(|e| {
|
||||
self.get_obj_value(py, elem, ctx, generator, *ty).map_err(|e| {
|
||||
super::CompileError::new_err(format!("Error getting element {i}: {e}"))
|
||||
})
|
||||
})
|
||||
@ -1425,7 +1374,7 @@ impl InnerResolver {
|
||||
match self
|
||||
.get_obj_value(
|
||||
py,
|
||||
&obj.getattr("_nac3_option").unwrap(),
|
||||
obj.getattr("_nac3_option").unwrap(),
|
||||
ctx,
|
||||
generator,
|
||||
option_val_ty,
|
||||
@ -1449,9 +1398,7 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids
|
||||
.write()
|
||||
.insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
}
|
||||
let global = ctx.module.add_global(
|
||||
v.get_type(),
|
||||
@ -1488,7 +1435,7 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids.write().insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
}
|
||||
|
||||
let fields = {
|
||||
@ -1498,7 +1445,7 @@ impl InnerResolver {
|
||||
attributes
|
||||
.iter()
|
||||
.filter_map(|f| {
|
||||
let definition = top_level_defs.get(f.1.0).unwrap().read();
|
||||
let definition = top_level_defs.get(f.1 .0).unwrap().read();
|
||||
if let TopLevelDef::Variable { ty, .. } = &*definition {
|
||||
Some((f.0, *ty))
|
||||
} else {
|
||||
@ -1513,7 +1460,7 @@ impl InnerResolver {
|
||||
.map(|(name, ty)| {
|
||||
self.get_obj_value(
|
||||
py,
|
||||
&obj.getattr(name.to_string().as_str())?,
|
||||
obj.getattr(name.to_string().as_str())?,
|
||||
ctx,
|
||||
generator,
|
||||
*ty,
|
||||
@ -1558,7 +1505,7 @@ impl InnerResolver {
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
self.global_value_ids.write().insert(id, obj.as_unbound().into_py_any(py)?);
|
||||
self.global_value_ids.write().insert(id, obj.into());
|
||||
}
|
||||
// should be classes
|
||||
let definition =
|
||||
@ -1570,7 +1517,7 @@ impl InnerResolver {
|
||||
.map(|(name, ty, _)| {
|
||||
self.get_obj_value(
|
||||
py,
|
||||
&obj.getattr(name.to_string().as_str())?,
|
||||
obj.getattr(name.to_string().as_str())?,
|
||||
ctx,
|
||||
generator,
|
||||
*ty,
|
||||
@ -1594,18 +1541,14 @@ impl InnerResolver {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_default_param_obj_value<'py>(
|
||||
fn get_default_param_obj_value(
|
||||
&self,
|
||||
py: Python<'py>,
|
||||
obj: &Bound<'py, PyAny>,
|
||||
py: Python,
|
||||
obj: &PyAny,
|
||||
) -> PyResult<Result<SymbolValue, String>> {
|
||||
let id: u64 = self.helper.id_fn.bind(py).call1((obj,))?.extract()?;
|
||||
let ty_id: u64 = self
|
||||
.helper
|
||||
.id_fn
|
||||
.bind(py)
|
||||
.call1((self.helper.type_fn.bind(py).call1((obj,))?,))?
|
||||
.extract()?;
|
||||
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let ty_id: u64 =
|
||||
self.helper.id_fn.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?.extract(py)?;
|
||||
Ok(if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
|
||||
let val: i32 = obj.extract()?;
|
||||
Ok(SymbolValue::I32(val))
|
||||
@ -1631,15 +1574,15 @@ impl InnerResolver {
|
||||
let val: f64 = obj.extract()?;
|
||||
Ok(SymbolValue::Double(val))
|
||||
} else if ty_id == self.primitive_ids.tuple {
|
||||
let elements = obj.downcast::<PyTuple>()?;
|
||||
let elements: &PyTuple = obj.downcast()?;
|
||||
let elements: Result<Result<Vec<_>, String>, _> =
|
||||
elements.iter().map(|elem| self.get_default_param_obj_value(py, &elem)).collect();
|
||||
elements.iter().map(|elem| self.get_default_param_obj_value(py, elem)).collect();
|
||||
elements?.map(SymbolValue::Tuple)
|
||||
} else if ty_id == self.primitive_ids.option {
|
||||
if id == self.primitive_ids.none {
|
||||
Ok(SymbolValue::OptionNone)
|
||||
} else {
|
||||
self.get_default_param_obj_value(py, &obj.getattr("_nac3_option").unwrap())?
|
||||
self.get_default_param_obj_value(py, obj.getattr("_nac3_option").unwrap())?
|
||||
.map(|v| SymbolValue::OptionSome(Box::new(v)))
|
||||
}
|
||||
} else {
|
||||
@ -1655,14 +1598,13 @@ impl SymbolResolver for Resolver {
|
||||
};
|
||||
|
||||
Python::with_gil(|py| -> PyResult<Option<SymbolValue>> {
|
||||
let obj = self.0.module.bind(py);
|
||||
let members = obj.getattr("__dict__").unwrap();
|
||||
let members = members.downcast::<PyDict>().unwrap();
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let members: &PyDict = obj.getattr("__dict__").unwrap().downcast().unwrap();
|
||||
let mut sym_value = None;
|
||||
for (key, val) in members {
|
||||
let key: &str = key.extract()?;
|
||||
if key == id.to_string() {
|
||||
if let Ok(Ok(v)) = self.0.get_default_param_obj_value(py, &val) {
|
||||
if let Ok(Ok(v)) = self.0.get_default_param_obj_value(py, val) {
|
||||
sym_value = Some(v);
|
||||
}
|
||||
break;
|
||||
@ -1696,14 +1638,13 @@ impl SymbolResolver for Resolver {
|
||||
Ok(t)
|
||||
} else {
|
||||
Python::with_gil(|py| -> PyResult<Result<Type, String>> {
|
||||
let obj = self.0.module.bind(py);
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let mut sym_ty = Err(format!("cannot find symbol `{str}`"));
|
||||
let members = obj.getattr("__dict__").unwrap();
|
||||
let members = members.downcast::<PyDict>().unwrap();
|
||||
let members: &PyDict = obj.getattr("__dict__").unwrap().downcast().unwrap();
|
||||
for (key, val) in members {
|
||||
let key: &str = key.extract()?;
|
||||
if key == str.to_string() {
|
||||
sym_ty = self.0.get_obj_type(py, &val, unifier, defs, primitives)?;
|
||||
sym_ty = self.0.get_obj_type(py, val, unifier, defs, primitives)?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -1728,60 +1669,42 @@ impl SymbolResolver for Resolver {
|
||||
) -> Option<ValueEnum<'ctx>> {
|
||||
if let Some(def_id) = self.0.id_to_def.read().get(&id) {
|
||||
let top_levels = ctx.top_level.definitions.read();
|
||||
if let TopLevelDef::Variable { resolver, .. } = &*top_levels[def_id.0].read() {
|
||||
if matches!(&*top_levels[def_id.0].read(), TopLevelDef::Variable { .. }) {
|
||||
let module_val = &self.0.module;
|
||||
let Ok((obj, idx)) = Python::with_gil(
|
||||
|py| -> PyResult<Result<(BasicValueEnum<'ctx>, Option<usize>), String>> {
|
||||
let module_val = (**module_val).bind(py);
|
||||
let ret = Python::with_gil(|py| -> PyResult<Result<BasicValueEnum, String>> {
|
||||
let module_val = module_val.as_ref(py);
|
||||
|
||||
let ty = self.0.get_obj_type(
|
||||
py,
|
||||
module_val,
|
||||
&mut ctx.unifier,
|
||||
&top_levels,
|
||||
&ctx.primitives,
|
||||
)?;
|
||||
if let Err(ty) = ty {
|
||||
return Ok(Err(ty));
|
||||
}
|
||||
let ty = ty.unwrap();
|
||||
let obj =
|
||||
self.0.get_obj_value(py, module_val, ctx, generator, ty)?.unwrap();
|
||||
let (idx, _) = ctx.get_attr_index(ty, id);
|
||||
|
||||
Ok(Ok((obj, idx)))
|
||||
},
|
||||
)
|
||||
.unwrap() else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let Some(idx) = idx else {
|
||||
// `idx` not found in the current resolver - try the resolver of the variable
|
||||
return resolver.as_ref().and_then(|resolver| {
|
||||
let resolver = &**resolver;
|
||||
|
||||
// TODO: Can we assume that if get_identifier_def returns a result,
|
||||
// get_symbol_value will also return a value?
|
||||
resolver
|
||||
.get_identifier_def(id)
|
||||
.ok()
|
||||
.and_then(|_| resolver.get_symbol_value(id, ctx, generator))
|
||||
});
|
||||
};
|
||||
|
||||
let ret = unsafe {
|
||||
ctx.builder.build_gep(
|
||||
obj.into_pointer_value(),
|
||||
&[
|
||||
ctx.ctx.i32_type().const_zero(),
|
||||
ctx.ctx.i32_type().const_int(idx as u64, false),
|
||||
],
|
||||
id.to_string().as_str(),
|
||||
)
|
||||
}
|
||||
let ty = self.0.get_obj_type(
|
||||
py,
|
||||
module_val,
|
||||
&mut ctx.unifier,
|
||||
&top_levels,
|
||||
&ctx.primitives,
|
||||
)?;
|
||||
if let Err(ty) = ty {
|
||||
return Ok(Err(ty));
|
||||
}
|
||||
let ty = ty.unwrap();
|
||||
let obj = self.0.get_obj_value(py, module_val, ctx, generator, ty)?.unwrap();
|
||||
let (idx, _) = ctx.get_attr_index(ty, id);
|
||||
let ret = unsafe {
|
||||
ctx.builder.build_gep(
|
||||
obj.into_pointer_value(),
|
||||
&[
|
||||
ctx.ctx.i32_type().const_zero(),
|
||||
ctx.ctx.i32_type().const_int(idx as u64, false),
|
||||
],
|
||||
id.to_string().as_str(),
|
||||
)
|
||||
}
|
||||
.unwrap();
|
||||
Ok(Ok(ret.as_basic_value_enum()))
|
||||
})
|
||||
.unwrap();
|
||||
return Some(ret.as_basic_value_enum().into());
|
||||
if ret.is_err() {
|
||||
return None;
|
||||
}
|
||||
return Some(ret.unwrap().into());
|
||||
}
|
||||
}
|
||||
|
||||
@ -1790,16 +1713,15 @@ impl SymbolResolver for Resolver {
|
||||
id_to_val.get(&id).cloned()
|
||||
}
|
||||
.or_else(|| {
|
||||
Python::with_gil(|py| -> PyResult<Option<PyValueHandle>> {
|
||||
let obj = self.0.module.bind(py);
|
||||
let mut sym_value: Option<PyValueHandle> = None;
|
||||
let members = obj.getattr("__dict__").unwrap();
|
||||
let members = members.downcast::<PyDict>().unwrap();
|
||||
Python::with_gil(|py| -> PyResult<Option<(u64, PyObject)>> {
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let mut sym_value: Option<(u64, PyObject)> = None;
|
||||
let members: &PyDict = obj.getattr("__dict__").unwrap().downcast().unwrap();
|
||||
for (key, val) in members {
|
||||
let key: &str = key.extract()?;
|
||||
if key == id.to_string() {
|
||||
let id = self.0.helper.id_fn.bind(py).call1((&val,))?.extract()?;
|
||||
sym_value = Some((id, Arc::new(val.as_unbound().into_py_any(py)?)));
|
||||
let id = self.0.helper.id_fn.call1(py, (val,))?.extract(py)?;
|
||||
sym_value = Some((id, val.extract()?));
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -1811,14 +1733,12 @@ impl SymbolResolver for Resolver {
|
||||
.unwrap()
|
||||
});
|
||||
sym_value.map(|(id, v)| {
|
||||
Python::with_gil(|_| {
|
||||
ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: v,
|
||||
store_obj: self.0.helper.store_obj.clone(),
|
||||
resolver: self.0.clone(),
|
||||
}))
|
||||
})
|
||||
ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: v,
|
||||
store_obj: self.0.helper.store_obj.clone(),
|
||||
resolver: self.0.clone(),
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
@ -1866,9 +1786,9 @@ impl SymbolResolver for Resolver {
|
||||
let store = self.0.deferred_eval_store.store.read();
|
||||
Python::with_gil(|py| -> PyResult<Result<(), String>> {
|
||||
for (variables, constraints, name) in store.iter() {
|
||||
let constraints = constraints.bind(py);
|
||||
let constraints: &PyAny = constraints.as_ref(py);
|
||||
for (i, var) in variables.iter().enumerate() {
|
||||
if let Ok(constr) = &constraints.get_item(i) {
|
||||
if let Ok(constr) = constraints.get_item(i) {
|
||||
match self.0.get_pyty_obj_type(py, constr, unifier, defs, primitives)? {
|
||||
Ok((ty, _)) => {
|
||||
if !unifier.is_concrete(ty, &[]) {
|
||||
|
@ -1,6 +1,6 @@
use nac3core::{
codegen::{CodeGenContext, expr::infer_and_call_function},
inkwell::{AddressSpace, AtomicOrdering, values::BasicValueEnum},
codegen::{expr::infer_and_call_function, CodeGenContext},
inkwell::{values::BasicValueEnum, AddressSpace, AtomicOrdering},
};

/// Functions for manipulating the timeline.

@ -2,7 +2,7 @@
name = "nac3ast"
version = "0.1.0"
authors = ["RustPython Team", "M-Labs"]
edition = "2024"
edition = "2021"

[features]
default = ["constant-optimization", "fold"]
@ -11,5 +11,5 @@ fold = []

[dependencies]
parking_lot = "0.12"
string-interner = "0.19"
string-interner = "0.18"
fxhash = "0.2"

@ -6,7 +6,7 @@ pub use crate::location::Location;
use fxhash::FxBuildHasher;
use parking_lot::{Mutex, MutexGuard};
use std::{cell::RefCell, collections::HashMap, fmt, sync::LazyLock};
use string_interner::{DefaultBackend, StringInterner, symbol::SymbolU32};
use string_interner::{symbol::SymbolU32, DefaultBackend, StringInterner};

pub type Interner = StringInterner<DefaultBackend, FxBuildHasher>;
static INTERNER: LazyLock<Mutex<Interner>> =

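The `ast.rs` hunk above is the identifier-interning setup: one process-wide `StringInterner` (hashed with `FxBuildHasher`) sits behind a `Mutex`, and the rest of the crate passes around small interned symbols instead of owned strings. A minimal sketch of that pattern follows, assuming the `string-interner` and `fxhash` crates from the manifest above; the `intern`/`resolve` helpers are illustrative rather than nac3's actual API.

```rust
use std::sync::{LazyLock, Mutex};

use fxhash::FxBuildHasher;
use string_interner::{symbol::SymbolU32, DefaultBackend, StringInterner};

type Interner = StringInterner<DefaultBackend, FxBuildHasher>;

// One global table; every distinct identifier is stored exactly once.
static INTERNER: LazyLock<Mutex<Interner>> = LazyLock::new(|| Mutex::new(Interner::new()));

/// Interns a string and returns a small, copyable handle to it.
fn intern(s: &str) -> SymbolU32 {
    INTERNER.lock().unwrap().get_or_intern(s)
}

/// Resolves a handle back to its string (cloned out of the shared table).
fn resolve(sym: SymbolU32) -> Option<String> {
    INTERNER.lock().unwrap().resolve(sym).map(str::to_owned)
}

fn main() {
    let a = intern("ndarray");
    let b = intern("ndarray");
    assert_eq!(a, b); // the same identifier always maps to the same symbol
    assert_eq!(resolve(a).as_deref(), Some("ndarray"));
}
```
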
@ -1,6 +1,6 @@
use crate::StrRef;
use crate::constant;
use crate::fold::Fold;
use crate::StrRef;

pub(crate) trait Foldable<T, U> {
type Mapped;

@ -2,7 +2,7 @@
name = "nac3core"
version = "0.1.0"
authors = ["M-Labs"]
edition = "2024"
edition = "2021"

[features]
default = ["derive"]
@ -12,7 +12,7 @@ no-escape-analysis = []
[dependencies]
itertools = "0.14"
crossbeam = "0.8"
indexmap = "2.8"
indexmap = "2.7"
parking_lot = "0.12"
nac3core_derive = { path = "nac3core_derive", optional = true }
nac3parser = { path = "../nac3parser" }

@ -23,10 +23,6 @@ DEF_builtin_unary(double, cbrt, double);
DEF_builtin_unary(double, erf, double);
DEF_builtin_unary(double, erfc, double);

#define __builtin_gamma __builtin_tgamma
DEF_builtin_unary(double, gamma, double);
#undef __builtin_gamma

DEF_builtin_binary(double, atan2, double, double);
DEF_builtin_binary(double, hypot, double, double);
DEF_builtin_binary(double, nextafter, double, double);

@ -34,6 +34,27 @@ DEF_nac3_int_exp_(int64_t);
DEF_nac3_int_exp_(uint32_t);
DEF_nac3_int_exp_(uint64_t);

double __nac3_gamma(double z) {
// Handling for denormals
// | x | Python gamma(x) | C tgamma(x) |
// --- | ----------------- | --------------- | ----------- |
// (1) | nan | nan | nan |
// (2) | -inf | -inf | inf |
// (3) | inf | inf | inf |
// (4) | 0.0 | inf | inf |
// (5) | {-1.0, -2.0, ...} | inf | nan |

// (1)-(3)
if (__builtin_isinf(z) || __builtin_isnan(z)) {
return z;
}

double v = __builtin_tgamma(z);

// (4)-(5)
return __builtin_isinf(v) || __builtin_isnan(v) ? __builtin_inf() : v;
}

double __nac3_gammaln(double x) {
// libm's handling of value overflows differs from scipy:
// - scipy: gammaln(-inf) -> -inf

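The table in `__nac3_gamma` above summarises the wrapper's whole job: `nan` and the infinities pass through unchanged, and every pole of the gamma function (0.0 and the negative integers) is collapsed to `+inf`, so the runtime helper matches the behaviour documented in the table rather than raw C `tgamma`. A rough Rust rendering of the same normalisation, purely illustrative and assuming the `libm` crate for `tgamma`:

```rust
// Illustrative mirror of the special-value handling in __nac3_gamma above.
// Assumes the `libm` crate (a Rust port of the C math library) for tgamma.
fn gamma_like(z: f64) -> f64 {
    // Cases (1)-(3): nan and +/-inf are returned unchanged.
    if z.is_nan() || z.is_infinite() {
        return z;
    }

    let v = libm::tgamma(z);

    // Cases (4)-(5): tgamma reports the poles (0.0, -1.0, -2.0, ...) as inf or
    // nan; both are normalised to +inf, as the IRRT helper does.
    if v.is_nan() || v.is_infinite() { f64::INFINITY } else { v }
}

fn main() {
    assert!((gamma_like(5.0) - 24.0).abs() < 1e-9); // gamma(5) = 4! = 24
    assert_eq!(gamma_like(f64::NEG_INFINITY), f64::NEG_INFINITY); // row (2)
    assert_eq!(gamma_like(-2.0), f64::INFINITY); // row (5)
}
```
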
@ -1,7 +1,7 @@
[package]
name = "nac3core_derive"
version = "0.1.0"
edition = "2024"
edition = "2021"

[lib]
proc-macro = true

@ -2,8 +2,8 @@ use proc_macro::TokenStream;
|
||||
use proc_macro_error::{abort, proc_macro_error};
|
||||
use quote::quote;
|
||||
use syn::{
|
||||
Data, DataStruct, Expr, ExprField, ExprMethodCall, ExprPath, GenericArgument, Ident, LitStr,
|
||||
Path, PathArguments, Type, TypePath, parse_macro_input, spanned::Spanned,
|
||||
parse_macro_input, spanned::Spanned, Data, DataStruct, Expr, ExprField, ExprMethodCall,
|
||||
ExprPath, GenericArgument, Ident, LitStr, Path, PathArguments, Type, TypePath,
|
||||
};
|
||||
|
||||
/// Extracts all generic arguments of a [`Type`] into a [`Vec`].
|
||||
@ -59,7 +59,11 @@ fn replace_top_level_receiver(expr: &mut Expr, ident: Ident) -> Option<&mut Expr
|
||||
| Expr::Field(ExprField { base: operand, .. }) = expr
|
||||
{
|
||||
return if extract_dot_operand(operand).is_some() {
|
||||
if replace_top_level_receiver(operand, ident).is_some() { Some(expr) } else { None }
|
||||
if replace_top_level_receiver(operand, ident).is_some() {
|
||||
Some(expr)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
*operand = Box::new(Expr::Path(ExprPath {
|
||||
attrs: Vec::default(),
|
||||
@ -101,7 +105,7 @@ fn normalize_value_expr(expr: &Expr) -> proc_macro2::TokenStream {
|
||||
abort!(
|
||||
path,
|
||||
format!(
|
||||
"Expected one of `size_t`, `usize`, or an implicit call expression in #[value_type(...)], found {}",
|
||||
"Expected one of `size_t`, `usize`, or an implicit call expression in #[value_type(...)], found {}",
|
||||
quote!(#expr).to_string(),
|
||||
)
|
||||
)
|
||||
@ -150,7 +154,7 @@ fn normalize_value_expr(expr: &Expr) -> proc_macro2::TokenStream {
|
||||
abort!(
|
||||
expr,
|
||||
format!(
|
||||
"Expected one of `size_t`, `usize`, or an implicit call expression in #[value_type(...)], found {}",
|
||||
"Expected one of `size_t`, `usize`, or an implicit call expression in #[value_type(...)], found {}",
|
||||
quote!(#expr).to_string(),
|
||||
)
|
||||
)
|
||||
@ -220,9 +224,10 @@ pub fn derive(input: TokenStream) -> TokenStream {
|
||||
let Data::Struct(DataStruct { fields, .. }) = &input.data else {
|
||||
abort!(input, "Only structs with named fields are supported");
|
||||
};
|
||||
if let Err(err_span) = fields
|
||||
.iter()
|
||||
.try_for_each(|field| if field.ident.is_some() { Ok(()) } else { Err(field.span()) })
|
||||
if let Err(err_span) =
|
||||
fields
|
||||
.iter()
|
||||
.try_for_each(|field| if field.ident.is_some() { Ok(()) } else { Err(field.span()) })
|
||||
{
|
||||
abort!(err_span, "Only structs with named fields are supported");
|
||||
};
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
|
@ -1,8 +1,8 @@
|
||||
use nac3core::{
|
||||
codegen::types::structure::StructField,
|
||||
inkwell::{
|
||||
AddressSpace,
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
},
|
||||
};
|
||||
use nac3core_derive::StructFields;
|
||||
|
@ -1,26 +1,26 @@
|
||||
use inkwell::{
|
||||
FloatPredicate, IntPredicate, OptimizationLevel,
|
||||
types::BasicTypeEnum,
|
||||
values::{BasicValueEnum, IntValue},
|
||||
FloatPredicate, IntPredicate, OptimizationLevel,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use super::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::destructure_range,
|
||||
extern_fns, irrt,
|
||||
irrt::calculate_len_for_slice_range,
|
||||
llvm_intrinsics,
|
||||
macros::codegen_unreachable,
|
||||
types::{ListType, RangeType, TupleType, ndarray::NDArrayType},
|
||||
types::{ndarray::NDArrayType, ListType, RangeType, TupleType},
|
||||
values::{
|
||||
ProxyValue, TypedArrayLikeAccessor, UntypedArrayLikeAccessor,
|
||||
ndarray::{NDArrayOut, NDArrayValue, ScalarOrNDArray},
|
||||
ProxyValue, TypedArrayLikeAccessor, UntypedArrayLikeAccessor,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
use crate::{
|
||||
toplevel::{
|
||||
helper::{PrimDef, arraylike_flatten_element_type, extract_ndims},
|
||||
helper::{arraylike_flatten_element_type, extract_ndims, PrimDef},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
@ -99,21 +99,17 @@ pub fn call_int32<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 32 => {
|
||||
debug_assert!(
|
||||
[ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
debug_assert!([ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
|
||||
n.into()
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 64 => {
|
||||
debug_assert!(
|
||||
[ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
debug_assert!([ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
|
||||
ctx.builder.build_int_truncate(n, llvm_i32, "trunc").map(Into::into).unwrap()
|
||||
}
|
||||
@ -159,11 +155,9 @@ pub fn call_int64<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match n {
|
||||
BasicValueEnum::IntValue(n) if matches!(n.get_type().get_bit_width(), 1 | 8 | 32) => {
|
||||
debug_assert!(
|
||||
[ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
debug_assert!([ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
|
||||
if ctx.unifier.unioned(n_ty, ctx.primitives.int32) {
|
||||
ctx.builder.build_int_s_extend(n, llvm_i64, "sext").map(Into::into).unwrap()
|
||||
@ -173,11 +167,9 @@ pub fn call_int64<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 64 => {
|
||||
debug_assert!(
|
||||
[ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
debug_assert!([ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
|
||||
n.into()
|
||||
}
|
||||
@ -230,11 +222,9 @@ pub fn call_uint32<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 32 => {
|
||||
debug_assert!(
|
||||
[ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
debug_assert!([ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
|
||||
n.into()
|
||||
}
|
||||
@ -303,11 +293,9 @@ pub fn call_uint64<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match n {
|
||||
BasicValueEnum::IntValue(n) if matches!(n.get_type().get_bit_width(), 1 | 8 | 32) => {
|
||||
debug_assert!(
|
||||
[ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
debug_assert!([ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.uint32,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
|
||||
if ctx.unifier.unioned(n_ty, ctx.primitives.int32) {
|
||||
ctx.builder.build_int_s_extend(n, llvm_i64, "sext").map(Into::into).unwrap()
|
||||
@ -317,11 +305,9 @@ pub fn call_uint64<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) if n.get_type().get_bit_width() == 64 => {
|
||||
debug_assert!(
|
||||
[ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
debug_assert!([ctx.primitives.int64, ctx.primitives.uint64,]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
|
||||
n.into()
|
||||
}
|
||||
@ -373,17 +359,15 @@ pub fn call_float<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match n {
|
||||
BasicValueEnum::IntValue(n) if matches!(n.get_type().get_bit_width(), 1 | 8 | 32 | 64) => {
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
|
||||
if [ctx.primitives.bool, ctx.primitives.int32, ctx.primitives.int64]
|
||||
.iter()
|
||||
@ -531,16 +515,14 @@ pub fn call_bool<'ctx, G: CodeGenerator + ?Sized>(
|
||||
}
|
||||
|
||||
BasicValueEnum::IntValue(n) => {
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty))
|
||||
);
|
||||
debug_assert!([
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(n_ty, *ty)));
|
||||
|
||||
ctx.builder
|
||||
.build_int_compare(IntPredicate::NE, n, n.get_type().const_zero(), FN_NAME)
|
||||
@ -701,17 +683,15 @@ pub fn call_min<'ctx>(
|
||||
|
||||
match (m, n) {
|
||||
(BasicValueEnum::IntValue(m), BasicValueEnum::IntValue(n)) => {
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty, *ty))
|
||||
);
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty, *ty)));
|
||||
|
||||
if [ctx.primitives.int32, ctx.primitives.int64]
|
||||
.iter()
|
||||
@ -746,18 +726,16 @@ pub fn call_numpy_minimum<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match (x1, x2) {
|
||||
(BasicValueEnum::IntValue(x1), BasicValueEnum::IntValue(x2)) => {
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty.unwrap(), *ty))
|
||||
);
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty.unwrap(), *ty)));
|
||||
|
||||
call_min(ctx, (x1_ty, x1.into()), (x2_ty, x2.into()))
|
||||
}
|
||||
@ -822,17 +800,15 @@ pub fn call_max<'ctx>(
|
||||
|
||||
match (m, n) {
|
||||
(BasicValueEnum::IntValue(m), BasicValueEnum::IntValue(n)) => {
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty, *ty))
|
||||
);
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty, *ty)));
|
||||
|
||||
if [ctx.primitives.int32, ctx.primitives.int64]
|
||||
.iter()
|
||||
@ -869,18 +845,16 @@ pub fn call_numpy_max_min<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match a {
|
||||
BasicValueEnum::IntValue(_) | BasicValueEnum::FloatValue(_) => {
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(a_ty, *ty))
|
||||
);
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(a_ty, *ty)));
|
||||
|
||||
match fn_name {
|
||||
"np_argmin" | "np_argmax" => llvm_int64.const_zero().into(),
|
||||
@ -1012,18 +986,16 @@ pub fn call_numpy_maximum<'ctx, G: CodeGenerator + ?Sized>(
|
||||
|
||||
Ok(match (x1, x2) {
|
||||
(BasicValueEnum::IntValue(x1), BasicValueEnum::IntValue(x2)) => {
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty.unwrap(), *ty))
|
||||
);
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
ctx.primitives.float,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(common_ty.unwrap(), *ty)));
|
||||
|
||||
call_max(ctx, (x1_ty, x1.into()), (x2_ty, x2.into()))
|
||||
}
|
||||
@ -1129,17 +1101,15 @@ pub fn call_abs<'ctx, G: CodeGenerator + ?Sized>(
|
||||
&|_ctx, elem_ty| elem_ty,
|
||||
&|_generator, ctx, val_ty, val| match val {
|
||||
BasicValueEnum::IntValue(n) => Some({
|
||||
debug_assert!(
|
||||
[
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(val_ty, *ty))
|
||||
);
|
||||
debug_assert!([
|
||||
ctx.primitives.bool,
|
||||
ctx.primitives.int32,
|
||||
ctx.primitives.uint32,
|
||||
ctx.primitives.int64,
|
||||
ctx.primitives.uint64,
|
||||
]
|
||||
.iter()
|
||||
.any(|ty| ctx.unifier.unioned(val_ty, *ty)));
|
||||
|
||||
if [ctx.primitives.int32, ctx.primitives.int64]
|
||||
.iter()
|
||||
@ -1411,7 +1381,7 @@ create_helper_call_numpy_unary_elementwise_float_to_float!(
|
||||
create_helper_call_numpy_unary_elementwise_float_to_float!(
|
||||
call_scipy_special_gamma,
|
||||
"sp_spec_gamma",
|
||||
irrt::call_gamma
|
||||
|ctx, val, _| irrt::call_gamma(ctx, val)
|
||||
);
|
||||
create_helper_call_numpy_unary_elementwise_float_to_float!(
|
||||
call_scipy_special_gammaln,
|
||||
|
@ -10,7 +10,7 @@ use crate::{
|
||||
typecheck::{
|
||||
type_inferencer::PrimitiveStore,
|
||||
typedef::{
|
||||
FunSignature, FuncArg, Type, TypeEnum, TypeVar, TypeVarId, Unifier, into_var_map,
|
||||
into_var_map, FunSignature, FuncArg, Type, TypeEnum, TypeVar, TypeVarId, Unifier,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
@ -6,12 +6,12 @@ use std::{
|
||||
};
|
||||
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
attributes::{Attribute, AttributeLoc},
|
||||
types::{AnyType, BasicType, BasicTypeEnum},
|
||||
values::{BasicValueEnum, CallSiteValue, FunctionValue, IntValue, PointerValue, StructValue},
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
};
|
||||
use itertools::{Either, Itertools, izip};
|
||||
use itertools::{izip, Either, Itertools};
|
||||
|
||||
use nac3parser::ast::{
|
||||
self, Boolop, Cmpop, Comprehension, Constant, Expr, ExprKind, Location, Operator, StrRef,
|
||||
@ -19,7 +19,6 @@ use nac3parser::ast::{
|
||||
};
|
||||
|
||||
use super::{
|
||||
CodeGenContext, CodeGenTask, CodeGenerator,
|
||||
concrete_type::{ConcreteFuncArg, ConcreteTypeEnum, ConcreteTypeStore},
|
||||
gen_in_range_check, get_llvm_abi_type, get_llvm_type, get_va_count_arg_name,
|
||||
irrt::*,
|
||||
@ -34,20 +33,21 @@ use super::{
|
||||
gen_var,
|
||||
},
|
||||
types::{
|
||||
ExceptionType, ListType, OptionType, RangeType, StringType, TupleType, ndarray::NDArrayType,
|
||||
ndarray::NDArrayType, ExceptionType, ListType, OptionType, RangeType, StringType, TupleType,
|
||||
},
|
||||
values::{
|
||||
ndarray::{NDArrayOut, RustNDIndex, ScalarOrNDArray},
|
||||
ArrayLikeIndexer, ArrayLikeValue, ListValue, ProxyValue, RangeValue,
|
||||
UntypedArrayLikeAccessor,
|
||||
ndarray::{NDArrayOut, RustNDIndex, ScalarOrNDArray},
|
||||
},
|
||||
CodeGenContext, CodeGenTask, CodeGenerator,
|
||||
};
|
||||
use crate::{
|
||||
symbol_resolver::{SymbolValue, ValueEnum},
|
||||
toplevel::{
|
||||
DefinitionId, TopLevelDef,
|
||||
helper::{PrimDef, arraylike_flatten_element_type, extract_ndims},
|
||||
helper::{arraylike_flatten_element_type, extract_ndims, PrimDef},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
DefinitionId, TopLevelDef,
|
||||
},
|
||||
typecheck::{
|
||||
magic_methods::{Binop, BinopVariant, HasOpInfo},
|
||||
@ -73,7 +73,7 @@ pub fn get_subst_key(
|
||||
})
|
||||
.unwrap_or_default();
|
||||
vars.extend(fun_vars);
|
||||
let sorted = vars.keys().filter(|id| filter.is_none_or(|v| v.contains(id))).sorted();
|
||||
let sorted = vars.keys().filter(|id| filter.map_or(true, |v| v.contains(id))).sorted();
|
||||
sorted
|
||||
.map(|id| {
|
||||
unifier.internal_stringify(
|
||||
@ -124,7 +124,7 @@ impl<'ctx> CodeGenContext<'ctx, '_> {
|
||||
|
||||
/// Checks the field and attributes of classes
|
||||
/// Returns the index of attr in class fields otherwise returns the attribute value
|
||||
pub fn get_attr_index(&mut self, ty: Type, attr: StrRef) -> (Option<usize>, Option<Constant>) {
|
||||
pub fn get_attr_index(&mut self, ty: Type, attr: StrRef) -> (usize, Option<Constant>) {
|
||||
let obj_id = match &*self.unifier.get_ty(ty) {
|
||||
TypeEnum::TObj { obj_id, .. } => *obj_id,
|
||||
TypeEnum::TModule { module_id, .. } => *module_id,
|
||||
@ -134,16 +134,13 @@ impl<'ctx> CodeGenContext<'ctx, '_> {
|
||||
let def = &self.top_level.definitions.read()[obj_id.0];
|
||||
let (index, value) = if let TopLevelDef::Class { fields, attributes, .. } = &*def.read() {
|
||||
if let Some(field_index) = fields.iter().find_position(|x| x.0 == attr) {
|
||||
(Some(field_index.0), None)
|
||||
(field_index.0, None)
|
||||
} else {
|
||||
let attribute_index = attributes.iter().find_position(|x| x.0 == attr);
|
||||
(
|
||||
attribute_index.map(|(idx, _)| idx),
|
||||
attribute_index.map(|(_, (_, _, k))| k.clone()),
|
||||
)
|
||||
let attribute_index = attributes.iter().find_position(|x| x.0 == attr).unwrap();
|
||||
(attribute_index.0, Some(attribute_index.1 .2.clone()))
|
||||
}
|
||||
} else if let TopLevelDef::Module { attributes, .. } = &*def.read() {
|
||||
(attributes.iter().find_position(|x| x.0 == attr).map(|(idx, _)| idx), None)
|
||||
(attributes.iter().find_position(|x| x.0 == attr).unwrap().0, None)
|
||||
} else {
|
||||
codegen_unreachable!(self)
|
||||
};
|
||||
@ -785,10 +782,11 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
) -> Result<Option<BasicValueEnum<'ctx>>, String> {
|
||||
let llvm_usize = ctx.get_size_type();
|
||||
|
||||
let definition = ctx.top_level.definitions.read().get(fun.1.0).cloned().unwrap();
|
||||
let definition = ctx.top_level.definitions.read().get(fun.1 .0).cloned().unwrap();
|
||||
let id;
|
||||
let key;
|
||||
let param_vals;
|
||||
let is_extern;
|
||||
let vararg_arg;
|
||||
|
||||
// Ensure that the function object only contains up to 1 vararg parameter
|
||||
@ -807,6 +805,7 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
if let Some(callback) = codegen_callback {
|
||||
return callback.run(ctx, obj, fun, params, generator);
|
||||
}
|
||||
is_extern = instance_to_stmt.is_empty();
|
||||
vararg_arg = fun.0.args.iter().find(|arg| arg.is_vararg);
|
||||
let old_key = ctx.get_subst_key(obj.as_ref().map(|a| a.0), fun.0, None);
|
||||
let mut keys = fun.0.args.clone();
|
||||
@ -866,10 +865,9 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
} else {
|
||||
mapping.insert(
|
||||
k.name,
|
||||
vec![
|
||||
ctx.gen_symbol_val(generator, &k.default_value.unwrap(), k.ty)
|
||||
.into(),
|
||||
],
|
||||
vec![ctx
|
||||
.gen_symbol_val(generator, &k.default_value.unwrap(), k.ty)
|
||||
.into()],
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -939,7 +937,7 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
instance_to_symbol.get(&key).cloned().ok_or_else(String::new)
|
||||
}
|
||||
TopLevelDef::Class { .. } => {
|
||||
return Ok(Some(generator.gen_constructor(ctx, fun.0, &def, params)?));
|
||||
return Ok(Some(generator.gen_constructor(ctx, fun.0, &def, params)?))
|
||||
}
|
||||
TopLevelDef::Variable { .. } | TopLevelDef::Module { .. } => unreachable!(),
|
||||
}
|
||||
@ -960,11 +958,22 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
} else {
|
||||
Some(ctx.get_llvm_abi_type(generator, fun.0.ret))
|
||||
};
|
||||
let has_sret = ret_type.is_some_and(|ret_type| need_sret(ret_type));
|
||||
let has_sret = ret_type.map_or(false, |ret_type| need_sret(ret_type));
|
||||
let mut byrefs = Vec::new();
|
||||
let mut params = args
|
||||
.iter()
|
||||
.filter(|arg| !arg.is_vararg)
|
||||
.map(|arg| ctx.get_llvm_abi_type(generator, arg.ty).into())
|
||||
.enumerate()
|
||||
.filter(|(_, arg)| !arg.is_vararg)
|
||||
.map(|(i, arg)| {
|
||||
match ctx.get_llvm_abi_type(generator, arg.ty) {
|
||||
BasicTypeEnum::StructType(ty) if is_extern => {
|
||||
byrefs.push((i, ty));
|
||||
ty.ptr_type(AddressSpace::default()).into()
|
||||
}
|
||||
x => x,
|
||||
}
|
||||
.into()
|
||||
})
|
||||
.collect_vec();
|
||||
if has_sret {
|
||||
params.insert(0, ret_type.unwrap().ptr_type(AddressSpace::default()).into());
|
||||
@ -978,7 +987,7 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
_ => ctx.ctx.void_type().fn_type(¶ms, is_vararg),
|
||||
};
|
||||
let fun_val = ctx.module.add_function(&symbol, fun_ty, None);
|
||||
if has_sret {
|
||||
let offset = if has_sret {
|
||||
fun_val.add_attribute(
|
||||
AttributeLoc::Param(0),
|
||||
ctx.ctx.create_type_attribute(
|
||||
@ -986,8 +995,23 @@ pub fn gen_call<'ctx, G: CodeGenerator>(
|
||||
ret_type.unwrap().as_any_type_enum(),
|
||||
),
|
||||
);
|
||||
}
|
||||
1
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
// The attribute ID used to mark arguments of a structure type.
|
||||
// Structure-Typed parameters of extern functions must **not** be marked as `byval`, as
|
||||
// `byval` explicitly specifies that the argument is to be passed on the stack, which breaks
|
||||
// on most ABIs where the first several arguments are expected to be passed in registers.
|
||||
let passing_attr_id =
|
||||
Attribute::get_named_enum_kind_id(if is_extern { "byref" } else { "byval" });
|
||||
for (i, ty) in byrefs {
|
||||
fun_val.add_attribute(
|
||||
AttributeLoc::Param((i as u32) + offset),
|
||||
ctx.ctx.create_type_attribute(passing_attr_id, ty.as_any_type_enum()),
|
||||
);
|
||||
}
|
||||
fun_val
|
||||
});
|
||||
|
||||
@ -1549,6 +1573,7 @@ pub fn gen_binop_expr_with_values<'ctx, G: CodeGenerator>(
|
||||
vec![(None, right_val.into())],
|
||||
)
|
||||
.map(Option::unwrap)
|
||||
.map(BasicValueEnum::into)
|
||||
}
|
||||
}
|
||||
|
||||
@ -2464,7 +2489,7 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
|
||||
let (index, _) = ctx.get_attr_index(value.custom.unwrap(), *attr);
|
||||
Ok(ValueEnum::Dynamic(ctx.build_gep_and_load(
|
||||
v.into_pointer_value(),
|
||||
&[zero, int32.const_int(index.unwrap() as u64, false)],
|
||||
&[zero, int32.const_int(index as u64, false)],
|
||||
None,
|
||||
))) as Result<_, String>
|
||||
},
|
||||
@ -2481,7 +2506,7 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
|
||||
}
|
||||
ValueEnum::Dynamic(ctx.build_gep_and_load(
|
||||
v.into_pointer_value(),
|
||||
&[zero, int32.const_int(index.unwrap() as u64, false)],
|
||||
&[zero, int32.const_int(index as u64, false)],
|
||||
None,
|
||||
))
|
||||
}
|
||||
@ -2566,7 +2591,7 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
|
||||
}
|
||||
ExprKind::UnaryOp { op, operand } => return gen_unaryop_expr(generator, ctx, *op, operand),
|
||||
ExprKind::Compare { left, ops, comparators } => {
|
||||
return gen_cmpop_expr(generator, ctx, left, ops, comparators);
|
||||
return gen_cmpop_expr(generator, ctx, left, ops, comparators)
|
||||
}
|
||||
ExprKind::IfExp { test, body, orelse } => {
|
||||
let test = match generator.gen_expr(ctx, test)? {
|
||||
@ -3010,8 +3035,9 @@ pub fn create_and_call_function<'ctx>(
|
||||
value_name: Option<&str>,
|
||||
configure: Option<&dyn Fn(&FunctionValue<'ctx>)>,
|
||||
) -> Option<BasicValueEnum<'ctx>> {
|
||||
let param_tys = params.iter().map(|(ty, _)| ty).copied().collect_vec();
|
||||
let arg_values = params.iter().map(|(_, value)| value).copied().collect_vec();
|
||||
let param_tys = params.iter().map(|(ty, _)| ty).copied().map(BasicTypeEnum::into).collect_vec();
|
||||
let arg_values =
|
||||
params.iter().map(|(_, value)| value).copied().map(BasicValueEnum::into).collect_vec();
|
||||
|
||||
create_fn_and_call(
|
||||
ctx,
|
||||
|
@ -3,7 +3,7 @@ use inkwell::{
|
||||
values::{BasicValueEnum, FloatValue},
|
||||
};
|
||||
|
||||
use super::{CodeGenContext, expr::infer_and_call_function};
|
||||
use super::{expr::infer_and_call_function, CodeGenContext};
|
||||
|
||||
/// Macro to generate extern function
|
||||
/// Both function return type and function parameter type are `FloatValue`
|
||||
|
@ -7,7 +7,7 @@ use inkwell::{
|
||||
|
||||
use nac3parser::ast::{Expr, Stmt, StrRef};
|
||||
|
||||
use super::{CodeGenContext, bool_to_int_type, expr::*, stmt::*, values::ArraySliceValue};
|
||||
use super::{bool_to_int_type, expr::*, stmt::*, values::ArraySliceValue, CodeGenContext};
|
||||
use crate::{
|
||||
symbol_resolver::ValueEnum,
|
||||
toplevel::{DefinitionId, TopLevelDef},
|
||||
@ -308,6 +308,10 @@ impl CodeGenerator for DefaultCodeGenerator {
|
||||
fn get_size_type<'ctx>(&self, ctx: &'ctx Context) -> IntType<'ctx> {
|
||||
// it should be unsigned, but we don't really need unsigned and this could save us from
|
||||
// having to do a bit cast...
|
||||
if self.size_t == 32 { ctx.i32_type() } else { ctx.i64_type() }
|
||||
if self.size_t == 32 {
|
||||
ctx.i32_type()
|
||||
} else {
|
||||
ctx.i64_type()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3,7 +3,7 @@ use inkwell::{
|
||||
values::{BasicValueEnum, FloatValue, IntValue},
|
||||
};
|
||||
|
||||
use crate::codegen::{CodeGenContext, expr::infer_and_call_function};
|
||||
use crate::codegen::{expr::infer_and_call_function, CodeGenContext};
|
||||
|
||||
/// Generates a call to [`isinf`](https://en.cppreference.com/w/c/numeric/math/isinf) in IR. Returns
|
||||
/// an `i1` representing the result.
|
||||
@ -94,7 +94,6 @@ generate_f64_nary_fn!(call_expm1, expm1, arg);
|
||||
generate_f64_nary_fn!(call_cbrt, cbrt, arg);
|
||||
generate_f64_nary_fn!(call_erf, erf, arg);
|
||||
generate_f64_nary_fn!(call_erfc, erfc, arg);
|
||||
generate_f64_nary_fn!(call_gamma, gamma, z);
|
||||
generate_f64_nary_fn!(call_atan2, atan2, y, x);
|
||||
generate_f64_nary_fn!(call_hypot, hypot, x, y);
|
||||
generate_f64_nary_fn!(call_nextafter, nextafter, from, to);
|
||||
|
@ -1,16 +1,16 @@
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate,
|
||||
types::BasicTypeEnum,
|
||||
values::{BasicValueEnum, IntValue},
|
||||
AddressSpace, IntPredicate,
|
||||
};
|
||||
|
||||
use super::calculate_len_for_slice_range;
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
macros::codegen_unreachable,
|
||||
stmt::gen_if_callback,
|
||||
values::{ArrayLikeValue, ListValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// This function handles 'end' **inclusively**.
|
||||
|
@ -1,6 +1,6 @@
|
||||
use inkwell::{
|
||||
IntPredicate,
|
||||
values::{BasicValueEnum, FloatValue, IntValue},
|
||||
IntPredicate,
|
||||
};
|
||||
|
||||
use crate::codegen::{
|
||||
@ -59,6 +59,24 @@ pub fn integer_power<'ctx, G: CodeGenerator + ?Sized>(
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
/// Generates a call to `gamma` in IR. Returns an `f64` representing the result.
|
||||
pub fn call_gamma<'ctx>(ctx: &CodeGenContext<'ctx, '_>, v: FloatValue<'ctx>) -> FloatValue<'ctx> {
|
||||
let llvm_f64 = ctx.ctx.f64_type();
|
||||
|
||||
assert_eq!(v.get_type(), llvm_f64);
|
||||
|
||||
infer_and_call_function(
|
||||
ctx,
|
||||
"__nac3_gamma",
|
||||
Some(llvm_f64.into()),
|
||||
&[v.into()],
|
||||
Some("gamma"),
|
||||
None,
|
||||
)
|
||||
.map(BasicValueEnum::into_float_value)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
/// Generates a call to `gammaln` in IR. Returns an `f64` representing the result.
|
||||
pub fn call_gammaln<'ctx>(ctx: &CodeGenContext<'ctx, '_>, v: FloatValue<'ctx>) -> FloatValue<'ctx> {
|
||||
let llvm_f64 = ctx.ctx.f64_type();
|
||||
|
@ -1,10 +1,10 @@
|
||||
use inkwell::{
|
||||
IntPredicate,
|
||||
attributes::{Attribute, AttributeLoc},
|
||||
context::Context,
|
||||
memory_buffer::MemoryBuffer,
|
||||
module::Module,
|
||||
values::{BasicValue, BasicValueEnum, IntValue},
|
||||
IntPredicate,
|
||||
};
|
||||
|
||||
use nac3parser::ast::Expr;
|
||||
|
@ -1,10 +1,10 @@
|
||||
use inkwell::{types::BasicTypeEnum, values::IntValue};
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
values::{ListValue, ProxyValue, TypedArrayLikeAccessor, ndarray::NDArrayValue},
|
||||
values::{ndarray::NDArrayValue, ListValue, ProxyValue, TypedArrayLikeAccessor},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_array_set_and_validate_list_shape`.
|
||||
|
@ -1,13 +1,13 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
values::{BasicValueEnum, IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
values::{ProxyValue, TypedArrayLikeAccessor, ndarray::NDArrayValue},
|
||||
values::{ndarray::NDArrayValue, ProxyValue, TypedArrayLikeAccessor},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_util_assert_shape_no_negative`.
|
||||
|
@ -1,14 +1,14 @@
|
||||
use inkwell::values::IntValue;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
types::{ProxyType, ndarray::ShapeEntryType},
|
||||
types::{ndarray::ShapeEntryType, ProxyType},
|
||||
values::{
|
||||
ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAccessor, TypedArrayLikeMutator,
|
||||
ndarray::NDArrayValue,
|
||||
ndarray::NDArrayValue, ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAccessor,
|
||||
TypedArrayLikeMutator,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_broadcast_to`.
|
||||
@ -55,13 +55,11 @@ pub fn call_nac3_ndarray_broadcast_shapes<'ctx, G, Shape>(
|
||||
let llvm_usize = ctx.get_size_type();
|
||||
|
||||
assert_eq!(num_shape_entries.get_type(), llvm_usize);
|
||||
assert!(
|
||||
ShapeEntryType::is_representable(
|
||||
shape_entries.base_ptr(ctx, generator).get_type(),
|
||||
llvm_usize,
|
||||
)
|
||||
.is_ok()
|
||||
);
|
||||
assert!(ShapeEntryType::is_representable(
|
||||
shape_entries.base_ptr(ctx, generator).get_type(),
|
||||
llvm_usize,
|
||||
)
|
||||
.is_ok());
|
||||
assert_eq!(dst_ndims.get_type(), llvm_usize);
|
||||
assert_eq!(dst_shape.element_type(ctx, generator), llvm_usize.into());
|
||||
|
||||
|
@ -1,8 +1,8 @@
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
values::{ArrayLikeValue, ArraySliceValue, ProxyValue, ndarray::NDArrayValue},
|
||||
values::{ndarray::NDArrayValue, ArrayLikeValue, ArraySliceValue, ProxyValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_index`.
|
||||
|
@ -1,13 +1,13 @@
|
||||
use inkwell::values::{BasicValueEnum, IntValue};
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
values::{
|
||||
ProxyValue, TypedArrayLikeAccessor,
|
||||
ndarray::{NDArrayValue, NDIterValue},
|
||||
ProxyValue, TypedArrayLikeAccessor,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_nditer_initialize`.
|
||||
|
@ -1,8 +1,8 @@
|
||||
use inkwell::values::IntValue;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator, expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name, values::TypedArrayLikeAccessor,
|
||||
expr::infer_and_call_function, irrt::get_usize_dependent_function_name,
|
||||
values::TypedArrayLikeAccessor, CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_matmul_calculate_shapes`.
|
||||
|
@ -1,10 +1,10 @@
|
||||
use inkwell::values::IntValue;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
values::{ArrayLikeValue, ArraySliceValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_reshape_resolve_and_check_new_shape`.
|
||||
|
@ -1,10 +1,10 @@
|
||||
use inkwell::{AddressSpace, values::IntValue};
|
||||
use inkwell::{values::IntValue, AddressSpace};
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::infer_and_call_function,
|
||||
irrt::get_usize_dependent_function_name,
|
||||
values::{ProxyValue, TypedArrayLikeAccessor, ndarray::NDArrayValue},
|
||||
values::{ndarray::NDArrayValue, ProxyValue, TypedArrayLikeAccessor},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Generates a call to `__nac3_ndarray_transpose`.
|
||||
|
@ -1,9 +1,9 @@
|
||||
use inkwell::{
|
||||
IntPredicate,
|
||||
values::{BasicValueEnum, IntValue},
|
||||
IntPredicate,
|
||||
};
|
||||
|
||||
use crate::codegen::{CodeGenContext, CodeGenerator, expr::infer_and_call_function};
|
||||
use crate::codegen::{expr::infer_and_call_function, CodeGenContext, CodeGenerator};
|
||||
|
||||
/// Invokes the `__nac3_range_slice_len` in IRRT.
|
||||
///
|
||||
|
@ -3,7 +3,7 @@ use inkwell::values::{BasicValueEnum, IntValue};
|
||||
use nac3parser::ast::Expr;
|
||||
|
||||
use crate::{
|
||||
codegen::{CodeGenContext, CodeGenerator, expr::infer_and_call_function},
|
||||
codegen::{expr::infer_and_call_function, CodeGenContext, CodeGenerator},
|
||||
typecheck::typedef::Type,
|
||||
};
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
use inkwell::values::{BasicValueEnum, IntValue};
|
||||
|
||||
use super::get_usize_dependent_function_name;
|
||||
use crate::codegen::{CodeGenContext, expr::infer_and_call_function, values::StringValue};
|
||||
use crate::codegen::{expr::infer_and_call_function, values::StringValue, CodeGenContext};
|
||||
|
||||
/// Generates a call to string equality comparison. Returns an `i1` representing whether the strings are equal.
|
||||
pub fn call_string_eq<'ctx>(
|
||||
|
@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
intrinsics::Intrinsic,
|
||||
types::AnyTypeEnum::IntType,
|
||||
values::{BasicValueEnum, CallSiteValue, FloatValue, IntValue, PointerValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Either;
|
||||
|
||||
|
@ -2,15 +2,14 @@ use std::{
|
||||
cell::OnceCell,
|
||||
collections::{HashMap, HashSet},
|
||||
sync::{
|
||||
Arc,
|
||||
atomic::{AtomicBool, Ordering},
|
||||
Arc,
|
||||
},
|
||||
thread,
|
||||
};
|
||||
|
||||
use crossbeam::channel::{Receiver, Sender, unbounded};
|
||||
use crossbeam::channel::{unbounded, Receiver, Sender};
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
attributes::{Attribute, AttributeLoc},
|
||||
basic_block::BasicBlock,
|
||||
builder::Builder,
|
||||
@ -23,6 +22,7 @@ use inkwell::{
|
||||
targets::{CodeModel, RelocMode, Target, TargetMachine, TargetTriple},
|
||||
types::{AnyType, BasicType, BasicTypeEnum, IntType},
|
||||
values::{BasicValueEnum, FunctionValue, IntValue, PhiValue, PointerValue},
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use parking_lot::{Condvar, Mutex};
|
||||
@ -32,9 +32,9 @@ use nac3parser::ast::{Location, Stmt, StrRef};
|
||||
use crate::{
|
||||
symbol_resolver::{StaticValue, SymbolResolver},
|
||||
toplevel::{
|
||||
TopLevelContext, TopLevelDef,
|
||||
helper::{PrimDef, extract_ndims},
|
||||
helper::{extract_ndims, PrimDef},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
TopLevelContext, TopLevelDef,
|
||||
},
|
||||
typecheck::{
|
||||
type_inferencer::{CodeLocation, PrimitiveStore},
|
||||
@ -44,8 +44,8 @@ use crate::{
|
||||
use concrete_type::{ConcreteType, ConcreteTypeEnum, ConcreteTypeStore};
|
||||
pub use generator::{CodeGenerator, DefaultCodeGenerator};
|
||||
use types::{
|
||||
ExceptionType, ListType, OptionType, ProxyType, RangeType, StringType, TupleType,
|
||||
ndarray::NDArrayType,
|
||||
ndarray::NDArrayType, ExceptionType, ListType, OptionType, ProxyType, RangeType, StringType,
|
||||
TupleType,
|
||||
};
|
||||
|
||||
pub mod builtin_fns;
|
||||
@ -832,7 +832,7 @@ pub fn gen_func_impl<
|
||||
))
|
||||
};
|
||||
|
||||
let has_sret = ret_type.is_some_and(|ty| need_sret(ty));
|
||||
let has_sret = ret_type.map_or(false, |ty| need_sret(ty));
|
||||
let mut params = args
|
||||
.iter()
|
||||
.filter(|arg| !arg.is_vararg)
|
||||
@ -1028,7 +1028,8 @@ pub fn gen_func_impl<
|
||||
);
|
||||
let generator_llvm_usize = generator.get_size_type(context);
|
||||
assert_eq!(
|
||||
generator_llvm_usize, target_llvm_usize,
|
||||
generator_llvm_usize,
|
||||
target_llvm_usize,
|
||||
"CodeGenerator (size_t = {generator_llvm_usize}) is not compatible with CodeGen Target (size_t = {target_llvm_usize})",
|
||||
);
|
||||
|
||||
|
@ -1,23 +1,23 @@
|
||||
use inkwell::{
|
||||
IntPredicate,
|
||||
values::{BasicValue, BasicValueEnum, PointerValue},
|
||||
IntPredicate,
|
||||
};
|
||||
|
||||
use nac3parser::ast::StrRef;
|
||||
|
||||
use super::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
macros::codegen_unreachable,
|
||||
stmt::gen_for_callback,
|
||||
types::ndarray::{NDArrayType, NDIterType},
|
||||
values::{ProxyValue, ndarray::shape::parse_numpy_int_sequence},
|
||||
values::{ndarray::shape::parse_numpy_int_sequence, ProxyValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
use crate::{
|
||||
symbol_resolver::ValueEnum,
|
||||
toplevel::{
|
||||
DefinitionId,
|
||||
helper::{arraylike_flatten_element_type, extract_ndims},
|
||||
numpy::unpack_ndarray_var_tys,
|
||||
DefinitionId,
|
||||
},
|
||||
typecheck::typedef::{FunSignature, Type},
|
||||
};
|
||||
|
@@ -1,35 +1,35 @@
use inkwell::{
IntPredicate,
attributes::{Attribute, AttributeLoc},
basic_block::BasicBlock,
builder::Builder,
types::{BasicType, BasicTypeEnum},
values::{BasicValue, BasicValueEnum, FunctionValue, IntValue, PointerValue},
IntPredicate,
};
use itertools::{Itertools, izip};
use itertools::{izip, Itertools};

use nac3parser::ast::{
Constant, ExcepthandlerKind, Expr, ExprKind, Location, Stmt, StmtKind, StrRef,
};

use super::{
CodeGenContext, CodeGenerator,
expr::{destructure_range, gen_binop_expr},
gen_in_range_check,
irrt::{handle_slice_indices, list_slice_assignment},
macros::codegen_unreachable,
types::{ExceptionType, RangeType, ndarray::NDArrayType},
types::{ndarray::NDArrayType, ExceptionType, RangeType},
values::{
ArrayLikeIndexer, ArraySliceValue, ExceptionValue, ListValue, ProxyValue,
ndarray::{RustNDIndex, ScalarOrNDArray},
ArrayLikeIndexer, ArraySliceValue, ExceptionValue, ListValue, ProxyValue,
},
CodeGenContext, CodeGenerator,
};
use crate::{
symbol_resolver::ValueEnum,
toplevel::{DefinitionId, TopLevelDef},
typecheck::{
magic_methods::Binop,
typedef::{FunSignature, Type, TypeEnum, iter_type_vars},
typedef::{iter_type_vars, FunSignature, Type, TypeEnum},
},
};

@@ -133,7 +133,7 @@ pub fn gen_store_target<'ctx, G: CodeGenerator>(
ptr,
&[
ctx.ctx.i32_type().const_zero(),
ctx.ctx.i32_type().const_int(index.unwrap() as u64, false),
ctx.ctx.i32_type().const_int(index as u64, false),
],
name.unwrap_or(""),
)
@@ -234,7 +234,7 @@ pub fn gen_assign_target_list<'ctx, G: CodeGenerator>(

let a = starred_target_index; // Number of RHS values before the starred target
let b = tuple_tys.len() - (targets.len() - 1 - starred_target_index); // Number of RHS values after the starred target
// Thus `tuple[a..b]` is assigned to the starred target.
// Thus `tuple[a..b]` is assigned to the starred target.

// Handle assignment before the starred target
for (target, val, val_ty) in
@@ -1468,10 +1468,10 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
ctx.outer_catch_clauses = old_clauses;
ctx.unwind_target = old_unwind;
ctx.return_target = old_return;
ctx.loop_target = old_loop_target.or(ctx.loop_target);
ctx.loop_target = old_loop_target.or(ctx.loop_target).take();

let old_unwind = if finalbody.is_empty() {
old_unwind
None
} else {
let final_landingpad = ctx.ctx.append_basic_block(current_fun, "try.catch.final");
ctx.builder.position_at_end(final_landingpad);
@@ -1592,7 +1592,7 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
}

ctx.unwind_target = old_unwind;
ctx.loop_target = old_loop_target.or(ctx.loop_target);
ctx.loop_target = old_loop_target.or(ctx.loop_target).take();
ctx.return_target = old_return;

ctx.builder.position_at_end(landingpad);
@@ -1828,7 +1828,7 @@ pub fn gen_with<'ctx, 'a, G: CodeGenerator>(
// reset old_unwind
ctx.unwind_target = old_unwind;
ctx.return_target = old_return;
ctx.loop_target = old_loop_target.or(ctx.loop_target);
ctx.loop_target = old_loop_target.or(ctx.loop_target).take();

let final_landingpad = ctx.ctx.append_basic_block(current_fun, "with.catch.final");
ctx.builder.position_at_end(final_landingpad);
@@ -1882,7 +1882,7 @@ pub fn gen_with<'ctx, 'a, G: CodeGenerator>(
let old_return = Some(return_target);

ctx.unwind_target = old_unwind;
ctx.loop_target = old_loop_target.or(ctx.loop_target);
ctx.loop_target = old_loop_target.or(ctx.loop_target).take();
ctx.return_target = old_return;

ctx.builder.position_at_end(landingpad);
@@ -7,26 +7,26 @@ use function_name::named;
use indexmap::IndexMap;
use indoc::indoc;
use inkwell::{
OptimizationLevel,
targets::{InitializationConfig, Target},
OptimizationLevel,
};
use nac3parser::{
ast::{FileName, StrRef, fold::Fold},
ast::{fold::Fold, FileName, StrRef},
parser::parse_program,
};
use parking_lot::RwLock;

use super::{
concrete_type::ConcreteTypeStore,
types::{ndarray::NDArrayType, ListType, ProxyType, RangeType},
CodeGenContext, CodeGenLLVMOptions, CodeGenTargetMachineOptions, CodeGenTask, CodeGenerator,
DefaultCodeGenerator, WithCall, WorkerRegistry,
concrete_type::ConcreteTypeStore,
types::{ListType, ProxyType, RangeType, ndarray::NDArrayType},
};
use crate::{
symbol_resolver::{SymbolResolver, ValueEnum},
toplevel::{
DefinitionId, FunInstance, TopLevelContext, TopLevelDef,
composer::{ComposerConfig, TopLevelComposer},
DefinitionId, FunInstance, TopLevelContext, TopLevelDef,
},
typecheck::{
type_inferencer::{FunctionData, IdentifierInfo, Inferencer, PrimitiveStore},
@@ -1,19 +1,19 @@
use inkwell::{
AddressSpace,
context::{AsContextRef, Context},
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
values::{IntValue, PointerValue, StructValue},
AddressSpace,
};
use itertools::Itertools;

use nac3core_derive::StructFields;

use super::{
structure::{check_struct_type_matches_fields, StructField, StructFields, StructProxyType},
ProxyType,
structure::{StructField, StructFields, StructProxyType, check_struct_type_matches_fields},
};
use crate::{
codegen::{CodeGenContext, CodeGenerator, values::ExceptionValue},
codegen::{values::ExceptionValue, CodeGenContext, CodeGenerator},
typecheck::typedef::{Type, TypeEnum},
};

@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
context::Context,
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace, IntPredicate, OptimizationLevel,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
@ -11,14 +11,14 @@ use nac3core_derive::StructFields;
|
||||
use super::ProxyType;
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::structure::{
|
||||
FieldIndexCounter, StructField, StructFields, StructProxyType,
|
||||
check_struct_type_matches_fields,
|
||||
check_struct_type_matches_fields, FieldIndexCounter, StructField, StructFields,
|
||||
StructProxyType,
|
||||
},
|
||||
values::ListValue,
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::{Type, TypeEnum, iter_type_vars},
|
||||
typecheck::typedef::{iter_type_vars, Type, TypeEnum},
|
||||
};
|
||||
|
||||
/// Proxy type for a `list` type in LLVM.
|
||||
|
@ -1,18 +1,19 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
types::BasicTypeEnum,
|
||||
values::{BasicValueEnum, IntValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
irrt,
|
||||
stmt::gen_if_else_expr_callback,
|
||||
types::{ListType, ProxyType, ndarray::NDArrayType},
|
||||
types::{ndarray::NDArrayType, ListType, ProxyType},
|
||||
values::{
|
||||
ArrayLikeValue, ArraySliceValue, ListValue, ProxyValue, TypedArrayLikeAdapter,
|
||||
TypedArrayLikeMutator, ndarray::NDArrayValue,
|
||||
ndarray::NDArrayValue, ArrayLikeValue, ArraySliceValue, ListValue, ProxyValue,
|
||||
TypedArrayLikeAdapter, TypedArrayLikeMutator,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
toplevel::helper::{arraylike_flatten_element_type, arraylike_get_ndims},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
|
@ -1,20 +1,20 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
structure::{check_struct_type_matches_fields, StructField, StructFields, StructProxyType},
|
||||
ProxyType,
|
||||
structure::{StructField, StructFields, StructProxyType, check_struct_type_matches_fields},
|
||||
},
|
||||
values::ndarray::ShapeEntryValue,
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::Context,
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
@ -10,15 +10,15 @@ use nac3core_derive::StructFields;
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
ProxyType,
|
||||
structure::{
|
||||
FieldIndexCounter, StructField, StructFields, StructProxyType,
|
||||
check_struct_type_matches_fields,
|
||||
check_struct_type_matches_fields, FieldIndexCounter, StructField, StructFields,
|
||||
StructProxyType,
|
||||
},
|
||||
ProxyType,
|
||||
},
|
||||
values::ndarray::ContiguousNDArrayValue,
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
toplevel::numpy::unpack_ndarray_var_tys,
|
||||
typecheck::typedef::Type,
|
||||
|
@ -1,12 +1,12 @@
|
||||
use inkwell::{
|
||||
IntPredicate,
|
||||
values::{BasicValueEnum, IntValue},
|
||||
IntPredicate,
|
||||
};
|
||||
|
||||
use super::NDArrayType;
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator, irrt, types::ProxyType, values::TypedArrayLikeAccessor,
|
||||
irrt, types::ProxyType, values::TypedArrayLikeAccessor, CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::Type,
|
||||
};
|
||||
|
@ -1,23 +1,23 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
structure::{check_struct_type_matches_fields, StructField, StructFields, StructProxyType},
|
||||
ProxyType,
|
||||
structure::{StructField, StructFields, StructProxyType, check_struct_type_matches_fields},
|
||||
},
|
||||
values::{
|
||||
ArrayLikeIndexer, ArraySliceValue,
|
||||
ndarray::{NDIndexValue, RustNDIndex},
|
||||
ArrayLikeIndexer, ArraySliceValue,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -2,16 +2,16 @@ use inkwell::{types::BasicTypeEnum, values::BasicValueEnum};
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
stmt::gen_for_callback,
|
||||
types::{
|
||||
ProxyType,
|
||||
ndarray::{NDArrayType, NDIterType},
|
||||
ProxyType,
|
||||
},
|
||||
values::{
|
||||
ArrayLikeValue, ProxyValue,
|
||||
ndarray::{NDArrayOut, NDArrayValue, ScalarOrNDArray},
|
||||
ArrayLikeValue, ProxyValue,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
impl<'ctx> NDArrayType<'ctx> {
|
||||
|
@ -1,20 +1,20 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{BasicValue, IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use super::{
|
||||
structure::{check_struct_type_matches_fields, StructField, StructFields, StructProxyType},
|
||||
ProxyType,
|
||||
structure::{StructField, StructFields, StructProxyType, check_struct_type_matches_fields},
|
||||
};
|
||||
use crate::{
|
||||
codegen::{
|
||||
values::{TypedArrayLikeMutator, ndarray::NDArrayValue},
|
||||
values::{ndarray::NDArrayValue, TypedArrayLikeMutator},
|
||||
{CodeGenContext, CodeGenerator},
|
||||
},
|
||||
toplevel::{helper::extract_ndims, numpy::unpack_ndarray_var_tys},
|
||||
|
@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
@ -10,14 +10,15 @@ use nac3core_derive::StructFields;
|
||||
|
||||
use super::ProxyType;
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
irrt,
|
||||
types::structure::{
|
||||
StructField, StructFields, StructProxyType, check_struct_type_matches_fields,
|
||||
check_struct_type_matches_fields, StructField, StructFields, StructProxyType,
|
||||
},
|
||||
values::{
|
||||
ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAdapter,
|
||||
ndarray::{NDArrayValue, NDIterValue},
|
||||
ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAdapter,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
@ -1,14 +1,14 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::Context,
|
||||
types::{BasicType, BasicTypeEnum, IntType, PointerType},
|
||||
values::{BasicValue, BasicValueEnum, PointerValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use super::ProxyType;
|
||||
use crate::{
|
||||
codegen::{CodeGenContext, CodeGenerator, values::OptionValue},
|
||||
typecheck::typedef::{Type, TypeEnum, iter_type_vars},
|
||||
codegen::{values::OptionValue, CodeGenContext, CodeGenerator},
|
||||
typecheck::typedef::{iter_type_vars, Type, TypeEnum},
|
||||
};
|
||||
|
||||
/// Proxy type for an `Option` type in LLVM.
|
||||
|
@ -1,8 +1,8 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::Context,
|
||||
types::{AnyTypeEnum, ArrayType, BasicType, BasicTypeEnum, IntType, PointerType},
|
||||
values::{ArrayValue, PointerValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use super::ProxyType;
|
||||
|
@ -1,18 +1,18 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::Context,
|
||||
types::{BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{GlobalValue, IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use super::{
|
||||
structure::{check_struct_type_matches_fields, StructField, StructFields},
|
||||
ProxyType,
|
||||
structure::{StructField, StructFields, check_struct_type_matches_fields},
|
||||
};
|
||||
use crate::codegen::{CodeGenContext, CodeGenerator, values::StringValue};
|
||||
use crate::codegen::{values::StringValue, CodeGenContext, CodeGenerator};
|
||||
|
||||
/// Proxy type for a `str` type in LLVM.
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -1,10 +1,10 @@
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::AsContextRef,
|
||||
types::{BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{AggregateValueEnum, BasicValue, BasicValueEnum, IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
|
@ -7,7 +7,7 @@ use itertools::Itertools;
|
||||
|
||||
use super::ProxyType;
|
||||
use crate::{
|
||||
codegen::{CodeGenContext, CodeGenerator, values::TupleValue},
|
||||
codegen::{values::TupleValue, CodeGenContext, CodeGenerator},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
};
|
||||
|
||||
@ -110,7 +110,7 @@ impl<'ctx> TupleType<'ctx> {
|
||||
/// The caller must ensure that the index is valid.
|
||||
#[must_use]
|
||||
pub unsafe fn type_at_index_unchecked(&self, index: u32) -> BasicTypeEnum<'ctx> {
|
||||
unsafe { self.ty.get_field_type_at_index_unchecked(index) }
|
||||
self.ty.get_field_type_at_index_unchecked(index)
|
||||
}
|
||||
|
||||
/// Constructs a [`TupleValue`] from this type by zero-initializing the tuple value.
|
||||
@ -131,11 +131,10 @@ impl<'ctx> TupleType<'ctx> {
|
||||
let values = objects.into_iter().collect_vec();
|
||||
|
||||
assert_eq!(values.len(), self.num_elements() as usize);
|
||||
assert!(
|
||||
values.iter().enumerate().all(|(i, v)| {
|
||||
v.get_type() == unsafe { self.type_at_index_unchecked(i as u32) }
|
||||
})
|
||||
);
|
||||
assert!(values
|
||||
.iter()
|
||||
.enumerate()
|
||||
.all(|(i, v)| { v.get_type() == unsafe { self.type_at_index_unchecked(i as u32) } }));
|
||||
|
||||
let mut value = self.construct(name);
|
||||
for (i, val) in values.into_iter().enumerate() {
|
||||
|
@ -1,23 +1,23 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
context::{AsContextRef, Context, ContextRef},
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType, PointerType, StructType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use nac3core_derive::StructFields;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
ProxyType,
|
||||
structure::{
|
||||
FieldIndexCounter, StructField, StructFields, StructProxyType,
|
||||
check_struct_type_matches_fields,
|
||||
check_struct_type_matches_fields, FieldIndexCounter, StructField, StructFields,
|
||||
StructProxyType,
|
||||
},
|
||||
ProxyType,
|
||||
},
|
||||
values::utils::SliceValue,
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -1,7 +1,7 @@
|
||||
use inkwell::{
|
||||
IntPredicate,
|
||||
types::AnyTypeEnum,
|
||||
values::{BasicValueEnum, IntValue, PointerValue},
|
||||
IntPredicate,
|
||||
};
|
||||
|
||||
use crate::codegen::{CodeGenContext, CodeGenerator};
|
||||
|
@ -6,13 +6,13 @@ use itertools::Itertools;
|
||||
|
||||
use nac3parser::ast::Location;
|
||||
|
||||
use super::{ProxyValue, StringValue, structure::StructProxyValue};
|
||||
use super::{structure::StructProxyValue, ProxyValue, StringValue};
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
ExceptionType,
|
||||
structure::{StructField, StructProxyType},
|
||||
ExceptionType,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
/// Proxy type for accessing an `Exception` value in LLVM.
|
||||
|
@ -1,17 +1,17 @@
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate,
|
||||
types::{AnyTypeEnum, BasicType, BasicTypeEnum, IntType},
|
||||
values::{BasicValueEnum, IntValue, PointerValue, StructValue},
|
||||
AddressSpace, IntPredicate,
|
||||
};
|
||||
|
||||
use super::{
|
||||
ArrayLikeIndexer, ArrayLikeValue, ProxyValue, UntypedArrayLikeAccessor,
|
||||
UntypedArrayLikeMutator, structure::StructProxyValue,
|
||||
structure::StructProxyValue, ArrayLikeIndexer, ArrayLikeValue, ProxyValue,
|
||||
UntypedArrayLikeAccessor, UntypedArrayLikeMutator,
|
||||
};
|
||||
use crate::codegen::{
|
||||
types::{
|
||||
ListType, ProxyType,
|
||||
structure::{StructField, StructProxyType},
|
||||
ListType, ProxyType,
|
||||
},
|
||||
{CodeGenContext, CodeGenerator},
|
||||
};
|
||||
|
@@ -1,6 +1,6 @@
use inkwell::{types::IntType, values::BasicValue};

use super::{CodeGenContext, types::ProxyType};
use super::{types::ProxyType, CodeGenContext};
pub use array::*;
pub use exception::*;
pub use list::*;
@ -5,17 +5,18 @@ use inkwell::{
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
irrt,
|
||||
types::{
|
||||
ProxyType,
|
||||
ndarray::{NDArrayType, ShapeEntryType},
|
||||
structure::{StructField, StructProxyType},
|
||||
ProxyType,
|
||||
},
|
||||
values::{
|
||||
ArrayLikeIndexer, ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAccessor,
|
||||
TypedArrayLikeAdapter, TypedArrayLikeMutator, ndarray::NDArrayValue,
|
||||
structure::StructProxyValue,
|
||||
ndarray::NDArrayValue, structure::StructProxyValue, ArrayLikeIndexer, ArrayLikeValue,
|
||||
ArraySliceValue, ProxyValue, TypedArrayLikeAccessor, TypedArrayLikeAdapter,
|
||||
TypedArrayLikeMutator,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
@ -167,11 +168,9 @@ fn broadcast_shapes<'ctx, G, Shape>(
|
||||
let llvm_usize = ctx.get_size_type();
|
||||
let llvm_shape_ty = ShapeEntryType::new(ctx);
|
||||
|
||||
assert!(
|
||||
in_shape_entries
|
||||
.iter()
|
||||
.all(|entry| entry.0.element_type(ctx, generator) == llvm_usize.into())
|
||||
);
|
||||
assert!(in_shape_entries
|
||||
.iter()
|
||||
.all(|entry| entry.0.element_type(ctx, generator) == llvm_usize.into()));
|
||||
assert_eq!(broadcast_shape.element_type(ctx, generator), llvm_usize.into());
|
||||
|
||||
// Prepare input shape entries to be passed to `call_nac3_ndarray_broadcast_shapes`.
|
||||
|
@ -1,18 +1,18 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
types::{BasicType, BasicTypeEnum, IntType},
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use super::NDArrayValue;
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
stmt::gen_if_callback,
|
||||
types::{
|
||||
ndarray::{ContiguousNDArrayType, NDArrayType},
|
||||
structure::{StructField, StructProxyType},
|
||||
},
|
||||
values::{ArrayLikeValue, ProxyValue, structure::StructProxyValue},
|
||||
values::{structure::StructProxyValue, ArrayLikeValue, ProxyValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
|
@ -2,9 +2,9 @@ use inkwell::values::{BasicValue, BasicValueEnum};
|
||||
|
||||
use super::{NDArrayValue, NDIterValue, ScalarOrNDArray};
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
stmt::{BreakContinueHooks, gen_for_callback},
|
||||
stmt::{gen_for_callback, BreakContinueHooks},
|
||||
types::ndarray::NDIterType,
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
impl<'ctx> NDArrayValue<'ctx> {
|
||||
|
@ -1,7 +1,7 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
types::IntType,
|
||||
values::{IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
@ -9,15 +9,16 @@ use nac3parser::ast::{Expr, ExprKind};
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
irrt,
|
||||
types::{
|
||||
ndarray::{NDArrayType, NDIndexType},
|
||||
structure::{StructField, StructProxyType},
|
||||
utils::SliceType,
|
||||
},
|
||||
values::{
|
||||
ProxyValue, ndarray::NDArrayValue, structure::StructProxyValue, utils::RustSlice,
|
||||
ndarray::NDArrayValue, structure::StructProxyValue, utils::RustSlice, ProxyValue,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::Type,
|
||||
};
|
||||
|
@ -1,11 +1,11 @@
|
||||
use inkwell::{types::BasicTypeEnum, values::BasicValueEnum};
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
values::{
|
||||
ProxyValue,
|
||||
ndarray::{NDArrayOut, NDArrayValue, ScalarOrNDArray},
|
||||
ProxyValue,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
impl<'ctx> NDArrayValue<'ctx> {
|
||||
|
@ -5,7 +5,6 @@ use nac3parser::ast::Operator;
|
||||
use super::{NDArrayOut, NDArrayValue, RustNDIndex};
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
expr::gen_binop_expr_with_values,
|
||||
irrt,
|
||||
stmt::gen_for_callback_incrementing,
|
||||
@ -14,6 +13,7 @@ use crate::{
|
||||
ArrayLikeValue, ArraySliceValue, TypedArrayLikeAccessor, TypedArrayLikeAdapter,
|
||||
UntypedArrayLikeAccessor, UntypedArrayLikeMutator,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
toplevel::helper::arraylike_flatten_element_type,
|
||||
typecheck::{magic_methods::Binop, typedef::Type},
|
||||
|
@ -1,28 +1,29 @@
|
||||
use std::iter::repeat_n;
|
||||
|
||||
use inkwell::{
|
||||
AddressSpace, IntPredicate,
|
||||
types::{AnyType, AnyTypeEnum, BasicType, BasicTypeEnum, IntType},
|
||||
values::{BasicValue, BasicValueEnum, IntValue, PointerValue, StructValue},
|
||||
AddressSpace, IntPredicate,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
||||
use super::{
|
||||
ArrayLikeIndexer, ArrayLikeValue, ProxyValue, TupleValue, TypedArrayLikeAccessor,
|
||||
TypedArrayLikeAdapter, TypedArrayLikeMutator, UntypedArrayLikeAccessor,
|
||||
UntypedArrayLikeMutator, structure::StructProxyValue,
|
||||
structure::StructProxyValue, ArrayLikeIndexer, ArrayLikeValue, ProxyValue, TupleValue,
|
||||
TypedArrayLikeAccessor, TypedArrayLikeAdapter, TypedArrayLikeMutator, UntypedArrayLikeAccessor,
|
||||
UntypedArrayLikeMutator,
|
||||
};
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
irrt,
|
||||
llvm_intrinsics::{call_int_umin, call_memcpy_generic_array},
|
||||
stmt::gen_for_callback_incrementing,
|
||||
type_aligned_alloca,
|
||||
types::{
|
||||
TupleType,
|
||||
ndarray::NDArrayType,
|
||||
structure::{StructField, StructProxyType},
|
||||
TupleType,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
};
|
||||
|
@ -1,18 +1,19 @@
|
||||
use inkwell::{
|
||||
AddressSpace,
|
||||
types::{BasicType, IntType},
|
||||
values::{BasicValueEnum, IntValue, PointerValue, StructValue},
|
||||
AddressSpace,
|
||||
};
|
||||
|
||||
use super::NDArrayValue;
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
stmt::{BreakContinueHooks, gen_for_callback},
|
||||
irrt,
|
||||
stmt::{gen_for_callback, BreakContinueHooks},
|
||||
types::{
|
||||
ndarray::NDIterType,
|
||||
structure::{StructField, StructProxyType},
|
||||
},
|
||||
values::{ArraySliceValue, ProxyValue, TypedArrayLikeAdapter, structure::StructProxyValue},
|
||||
values::{structure::StructProxyValue, ArraySliceValue, ProxyValue, TypedArrayLikeAdapter},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
|
@ -2,13 +2,13 @@ use inkwell::values::{BasicValueEnum, IntValue};
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
stmt::gen_for_callback_incrementing,
|
||||
types::{ListType, TupleType},
|
||||
values::{
|
||||
ArraySliceValue, ProxyValue, TypedArrayLikeAccessor, TypedArrayLikeAdapter,
|
||||
TypedArrayLikeMutator, UntypedArrayLikeAccessor,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::{Type, TypeEnum},
|
||||
};
|
||||
@ -29,7 +29,7 @@ pub fn parse_numpy_int_sequence<'ctx, G: CodeGenerator + ?Sized>(
|
||||
generator: &mut G,
|
||||
ctx: &mut CodeGenContext<'ctx, '_>,
|
||||
(input_seq_ty, input_seq): (Type, BasicValueEnum<'ctx>),
|
||||
) -> impl TypedArrayLikeAccessor<'ctx, G, IntValue<'ctx>> + use<'ctx, G> {
|
||||
) -> impl TypedArrayLikeAccessor<'ctx, G, IntValue<'ctx>> {
|
||||
let llvm_usize = ctx.get_size_type();
|
||||
let zero = llvm_usize.const_zero();
|
||||
let one = llvm_usize.const_int(1, false);
|
||||
|
@ -4,13 +4,14 @@ use inkwell::values::{IntValue, PointerValue};
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::codegen::{
|
||||
CodeGenContext, CodeGenerator, irrt,
|
||||
irrt,
|
||||
stmt::gen_if_callback,
|
||||
types::ndarray::NDArrayType,
|
||||
values::{
|
||||
ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAccessor, TypedArrayLikeAdapter,
|
||||
ndarray::{NDArrayValue, RustNDIndex},
|
||||
ArrayLikeValue, ArraySliceValue, ProxyValue, TypedArrayLikeAccessor, TypedArrayLikeAdapter,
|
||||
},
|
||||
CodeGenContext, CodeGenerator,
|
||||
};
|
||||
|
||||
impl<'ctx> NDArrayValue<'ctx> {
|
||||
|
@@ -4,7 +4,7 @@ use inkwell::{
};

use super::ProxyValue;
use crate::codegen::{CodeGenContext, types::OptionType};
use crate::codegen::{types::OptionType, CodeGenContext};

/// Proxy type for accessing a `Option` value in LLVM.
#[derive(Copy, Clone)]
@@ -4,7 +4,7 @@ use inkwell::{
};

use super::ProxyValue;
use crate::codegen::{CodeGenContext, CodeGenerator, types::RangeType};
use crate::codegen::{types::RangeType, CodeGenContext, CodeGenerator};

/// Proxy type for accessing a `range` value in LLVM.
#[derive(Copy, Clone)]
@@ -4,9 +4,9 @@ use inkwell::{
};

use crate::codegen::{
CodeGenContext,
types::{StringType, structure::StructField},
types::{structure::StructField, StringType},
values::ProxyValue,
CodeGenContext,
};

/// Proxy type for accessing a `str` value in LLVM.
@@ -1,7 +1,7 @@
use inkwell::values::{BasicValueEnum, PointerValue, StructValue};

use super::ProxyValue;
use crate::codegen::{CodeGenContext, types::structure::StructProxyType};
use crate::codegen::{types::structure::StructProxyType, CodeGenContext};

/// An LLVM value that is used to represent a corresponding structure-like value in NAC3.
pub trait StructProxyValue<'ctx>:
@@ -4,7 +4,7 @@ use inkwell::{
};

use super::ProxyValue;
use crate::codegen::{CodeGenContext, types::TupleType};
use crate::codegen::{types::TupleType, CodeGenContext};

#[derive(Copy, Clone)]
pub struct TupleValue<'ctx> {
@ -7,12 +7,12 @@ use nac3parser::ast::Expr;
|
||||
|
||||
use crate::{
|
||||
codegen::{
|
||||
CodeGenContext, CodeGenerator,
|
||||
types::{
|
||||
structure::{StructField, StructProxyType},
|
||||
utils::SliceType,
|
||||
},
|
||||
values::{ProxyValue, structure::StructProxyValue},
|
||||
values::{structure::StructProxyValue, ProxyValue},
|
||||
CodeGenContext, CodeGenerator,
|
||||
},
|
||||
typecheck::typedef::Type,
|
||||
};
|
||||
|
@ -6,14 +6,14 @@ use std::{
|
||||
};
|
||||
|
||||
use inkwell::values::{BasicValueEnum, FloatValue, IntValue, PointerValue, StructValue};
|
||||
use itertools::{Itertools, izip};
|
||||
use itertools::{izip, Itertools};
|
||||
use parking_lot::RwLock;
|
||||
|
||||
use nac3parser::ast::{Constant, Expr, Location, StrRef};
|
||||
|
||||
use crate::{
|
||||
codegen::{CodeGenContext, CodeGenerator},
|
||||
toplevel::{DefinitionId, TopLevelDef, type_annotation::TypeAnnotation},
|
||||
toplevel::{type_annotation::TypeAnnotation, DefinitionId, TopLevelDef},
|
||||
typecheck::{
|
||||
type_inferencer::PrimitiveStore,
|
||||
typedef::{Type, TypeEnum, Unifier, VarMap},
|
||||
|
@ -1,13 +1,13 @@
|
||||
use std::iter::once;
|
||||
|
||||
use indexmap::IndexMap;
|
||||
use inkwell::{IntPredicate, values::BasicValue};
|
||||
use inkwell::{values::BasicValue, IntPredicate};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use super::{
|
||||
helper::{
|
||||
PrimDef, PrimDefDetails, arraylike_flatten_element_type, debug_assert_prim_is_allowed,
|
||||
extract_ndims, make_exception_fields,
|
||||
arraylike_flatten_element_type, debug_assert_prim_is_allowed, extract_ndims,
|
||||
make_exception_fields, PrimDef, PrimDefDetails,
|
||||
},
|
||||
numpy::{make_ndarray_ty, unpack_ndarray_var_tys},
|
||||
*,
|
||||
@ -17,14 +17,14 @@ use crate::{
|
||||
builtin_fns,
|
||||
numpy::*,
|
||||
stmt::{exn_constructor, gen_if_callback},
|
||||
types::{RangeType, ndarray::NDArrayType},
|
||||
types::{ndarray::NDArrayType, RangeType},
|
||||
values::{
|
||||
ndarray::{shape::parse_numpy_int_sequence, ScalarOrNDArray},
|
||||
ProxyValue,
|
||||
ndarray::{ScalarOrNDArray, shape::parse_numpy_int_sequence},
|
||||
},
|
||||
},
|
||||
symbol_resolver::SymbolValue,
|
||||
typecheck::typedef::{TypeVar, VarMap, into_var_map, iter_type_vars},
|
||||
typecheck::typedef::{into_var_map, iter_type_vars, TypeVar, VarMap},
|
||||
};
|
||||
|
||||
type BuiltinInfo = Vec<(Arc<RwLock<TopLevelDef>>, Option<Stmt>)>;
|
||||
@ -479,9 +479,7 @@ impl<'a> BuiltinBuilder<'a> {
|
||||
assert_eq!(simple_name, &exp_simple_name.into());
|
||||
}
|
||||
_ => {
|
||||
panic!(
|
||||
"Class/function variant of the constructed TopLevelDef of PrimDef {prim:?} is different than what is defined by {prim:?}"
|
||||
)
|
||||
panic!("Class/function variant of the constructed TopLevelDef of PrimDef {prim:?} is different than what is defined by {prim:?}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use indexmap::IndexMap;
|
||||
use nac3parser::ast::{ExprKind, Ident, fold::Fold};
|
||||
use nac3parser::ast::{fold::Fold, ExprKind, Ident};
|
||||
|
||||
use super::*;
|
||||
use crate::{
|
||||
@ -265,7 +265,8 @@ impl TopLevelComposer {
|
||||
if self.keyword_list.contains(class_name) {
|
||||
return Err(format!(
|
||||
"cannot use keyword `{}` as a class name (at {})",
|
||||
class_name, ast.location
|
||||
class_name,
|
||||
ast.location
|
||||
));
|
||||
}
|
||||
let fully_qualified_class_name = if mod_path.is_empty() {
|
||||
@ -276,7 +277,8 @@ impl TopLevelComposer {
|
||||
if !defined_names.insert(fully_qualified_class_name.into()) {
|
||||
return Err(format!(
|
||||
"duplicate definition of class `{}` (at {})",
|
||||
class_name, ast.location
|
||||
class_name,
|
||||
ast.location
|
||||
));
|
||||
}
|
||||
|
||||
@ -292,7 +294,7 @@ impl TopLevelComposer {
|
||||
resolver.clone(),
|
||||
fully_qualified_class_name,
|
||||
Some(constructor_ty),
|
||||
Some(ast.location),
|
||||
Some(ast.location)
|
||||
))),
|
||||
None,
|
||||
);
|
||||
@ -319,7 +321,8 @@ impl TopLevelComposer {
|
||||
if self.keyword_list.contains(method_name) {
|
||||
return Err(format!(
|
||||
"cannot use keyword `{}` as a method name (at {})",
|
||||
method_name, b.location
|
||||
method_name,
|
||||
b.location
|
||||
));
|
||||
}
|
||||
let global_class_method_name = Self::make_class_method_name(
|
||||
@ -329,7 +332,8 @@ impl TopLevelComposer {
|
||||
if !defined_names.insert(global_class_method_name.clone()) {
|
||||
return Err(format!(
|
||||
"class method `{}` defined twice (at {})",
|
||||
global_class_method_name, b.location
|
||||
global_class_method_name,
|
||||
b.location
|
||||
));
|
||||
}
|
||||
let method_def_id = self.definition_ast_list.len() + {
|
||||
@ -376,11 +380,7 @@ impl TopLevelComposer {
|
||||
self.definition_ast_list.push((def, Some(ast)));
|
||||
}
|
||||
|
||||
let result_ty = if allow_no_constructor || contains_constructor {
|
||||
Some(constructor_ty)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let result_ty = if allow_no_constructor || contains_constructor { Some(constructor_ty) } else { None };
|
||||
Ok((class_name, DefinitionId(class_def_id), result_ty))
|
||||
}
|
||||
|
||||
@ -393,7 +393,8 @@ impl TopLevelComposer {
|
||||
if !defined_names.insert(global_fun_name.clone()) {
|
||||
return Err(format!(
|
||||
"top level function `{}` defined twice (at {})",
|
||||
global_fun_name, ast.location
|
||||
global_fun_name,
|
||||
ast.location
|
||||
));
|
||||
}
|
||||
|
||||
@ -407,7 +408,7 @@ impl TopLevelComposer {
|
||||
// dummy here, unify with correct type later
|
||||
ty_to_be_unified,
|
||||
resolver,
|
||||
Some(ast.location),
|
||||
Some(ast.location)
|
||||
))
|
||||
.into(),
|
||||
Some(ast),
|
||||
@ -431,10 +432,7 @@ impl TopLevelComposer {
|
||||
// Make callers use `register_top_level_var` instead, as it provides more
|
||||
// fine-grained control over which symbols to register, while also simplifying the
|
||||
// usage of this function.
|
||||
panic!(
|
||||
"Registration of top-level Assign statements must use TopLevelComposer::register_top_level_var (at {})",
|
||||
ast.location
|
||||
);
|
||||
panic!("Registration of top-level Assign statements must use TopLevelComposer::register_top_level_var (at {})", ast.location);
|
||||
}
|
||||
|
||||
ast::StmtKind::AnnAssign { target, annotation, .. } => {
|
||||
@ -463,9 +461,9 @@ impl TopLevelComposer {
|
||||
|
||||
/// Registers a top-level variable with the given `name` into the composer.
|
||||
///
|
||||
/// - `annotation` - The type annotation of the top-level variable, or [`None`] if no type
|
||||
/// annotation is provided.
|
||||
/// - `location` - The location of the top-level variable.
|
||||
/// `annotation` - The type annotation of the top-level variable, or [`None`] if no type
|
||||
/// annotation is provided.
|
||||
/// `location` - The location of the top-level variable.
|
||||
pub fn register_top_level_var(
|
||||
&mut self,
|
||||
name: Ident,
|
||||
@ -1236,7 +1234,7 @@ impl TopLevelComposer {
|
||||
ExprKind::Subscript { value, slice, .. }
|
||||
if matches!(
|
||||
&value.node,
|
||||
ast::ExprKind::Name { id, .. } if core_config.kernel_ann.is_some_and(|c| id == &c.into())
|
||||
ast::ExprKind::Name { id, .. } if core_config.kernel_ann.map_or(false, |c| id == &c.into())
|
||||
) =>
|
||||
{
|
||||
(slice, true)
|
||||
@ -1407,14 +1405,14 @@ impl TopLevelComposer {
|
||||
);
|
||||
if !ok {
|
||||
return Err(HashSet::from([format!(
|
||||
"method {class_method_name} has same name as ancestors' method, but incompatible type"
|
||||
)]));
|
||||
"method {class_method_name} has same name as ancestors' method, but incompatible type"),
|
||||
]));
|
||||
}
|
||||
}
|
||||
}
|
||||
class_methods_def.clear();
|
||||
class_methods_def
|
||||
.extend(new_child_methods.iter().map(|f| (*f.0, f.1.0, f.1.1)).collect_vec());
|
||||
.extend(new_child_methods.iter().map(|f| (*f.0, f.1 .0, f.1 .1)).collect_vec());
|
||||
|
||||
// handle class fields
|
||||
let mut new_child_fields: IndexMap<StrRef, (Type, bool)> =
|
||||
@ -1443,10 +1441,10 @@ impl TopLevelComposer {
|
||||
|
||||
class_fields_def.clear();
|
||||
class_fields_def
|
||||
.extend(new_child_fields.iter().map(|f| (*f.0, f.1.0, f.1.1)).collect_vec());
|
||||
.extend(new_child_fields.iter().map(|f| (*f.0, f.1 .0, f.1 .1)).collect_vec());
|
||||
class_attribute_def.clear();
|
||||
class_attribute_def.extend(
|
||||
new_child_attributes.iter().map(|f| (*f.0, f.1.0, f.1.1.clone())).collect_vec(),
|
||||
new_child_attributes.iter().map(|f| (*f.0, f.1 .0, f.1 .1.clone())).collect_vec(),
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
@ -1623,10 +1621,14 @@ impl TopLevelComposer {
|
||||
)?;
|
||||
for (f, _, _) in fields {
|
||||
if !all_inited.contains(f) {
|
||||
return Err(HashSet::from([format!(
|
||||
"fields `{}` of class `{}` not fully initialized in the initializer (at {})",
|
||||
f, class_name, body[0].location,
|
||||
)]));
|
||||
return Err(HashSet::from([
|
||||
format!(
|
||||
"fields `{}` of class `{}` not fully initialized in the initializer (at {})",
|
||||
f,
|
||||
class_name,
|
||||
body[0].location,
|
||||
),
|
||||
]));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1898,8 +1900,8 @@ impl TopLevelComposer {
|
||||
let base_repr = inferencer.unifier.stringify(*base);
|
||||
let subtype_repr = inferencer.unifier.stringify(*subtype);
|
||||
return Err(HashSet::from([format!(
|
||||
"Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})"
|
||||
)]));
|
||||
"Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})"),
|
||||
]));
|
||||
}
|
||||
};
|
||||
let subtype_entry = defs[subtype_id.0].read();
|
||||
@ -1913,8 +1915,8 @@ impl TopLevelComposer {
|
||||
let base_repr = inferencer.unifier.stringify(*base);
|
||||
let subtype_repr = inferencer.unifier.stringify(*subtype);
|
||||
return Err(HashSet::from([format!(
|
||||
"Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})"
|
||||
)]));
|
||||
"Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})"),
|
||||
]));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1999,15 +2001,13 @@ impl TopLevelComposer {
|
||||
ExprKind::Subscript { value, slice, .. }
|
||||
if matches!(
|
||||
&value.node,
|
||||
ast::ExprKind::Name { id, .. } if self.core_config.kernel_ann.is_some_and(|c| id == &c.into()) || id == &self.core_config.kernel_invariant_ann.into()
|
||||
ast::ExprKind::Name { id, .. } if self.core_config.kernel_ann.map_or(false, |c| id == &c.into())
|
||||
) =>
|
||||
{
|
||||
slice
|
||||
}
|
||||
_ if self.core_config.kernel_ann.is_none() => ty_decl,
|
||||
_ => unreachable!(
|
||||
"Global variables should be annotated with Kernel[] or KernelInvariant[]"
|
||||
), // ignore fields annotated otherwise
|
||||
_ => unreachable!("Global variables should be annotated with Kernel[]"), // ignore fields annotated otherwise
|
||||
};
|
||||
|
||||
let ty_annotation = parse_ast_to_type_annotation_kinds(
|
||||
|
@ -8,7 +8,7 @@ use nac3parser::ast::{Constant, ExprKind, Location};
|
||||
use super::{numpy::unpack_ndarray_var_tys, *};
|
||||
use crate::{
|
||||
symbol_resolver::SymbolValue,
|
||||
typecheck::typedef::{Mapping, TypeVarId, VarMap, into_var_map, iter_type_vars},
|
||||
typecheck::typedef::{into_var_map, iter_type_vars, Mapping, TypeVarId, VarMap},
|
||||
};
|
||||
|
||||
/// All primitive types and functions in nac3core.
|
||||
@ -757,7 +757,7 @@ impl TopLevelComposer {
|
||||
return Err(HashSet::from([format!(
|
||||
"redundant type annotation for class fields at {}",
|
||||
s.location
|
||||
)]));
|
||||
)]))
|
||||
}
|
||||
ast::StmtKind::Assign { targets, .. } => {
|
||||
for t in targets {
|
||||
@ -1038,10 +1038,7 @@ impl TopLevelComposer {
|
||||
}
|
||||
ast::ExprKind::Name { .. } | ast::ExprKind::Subscript { .. } => {
|
||||
if has_base {
|
||||
return Err(HashSet::from([format!(
|
||||
"a class definition can only have at most one base class declaration and one generic declaration (at {})",
|
||||
b.location
|
||||
)]));
|
||||
return Err(HashSet::from([format!("a class definition can only have at most one base class declaration and one generic declaration (at {})", b.location )]));
|
||||
}
|
||||
has_base = true;
|
||||
// the function parse_ast_to make sure that no type var occurred in
|
||||
@ -1236,9 +1233,7 @@ pub fn arraylike_get_ndims(unifier: &mut Unifier, ty: Type) -> u64 {
|
||||
};
|
||||
|
||||
if values.len() > 1 {
|
||||
todo!(
|
||||
"Getting num of dimensions for ndarray with more than one ndim bound is unimplemented"
|
||||
)
|
||||
todo!("Getting num of dimensions for ndarray with more than one ndim bound is unimplemented")
|
||||
}
|
||||
|
||||
u64::try_from(values[0].clone()).unwrap()
|
||||
|
@ -5,11 +5,11 @@ use parking_lot::Mutex;
|
||||
use test_case::test_case;
|
||||
|
||||
use nac3parser::{
|
||||
ast::{FileName, fold::Fold},
|
||||
ast::{fold::Fold, FileName},
|
||||
parser::parse_program,
|
||||
};
|
||||
|
||||
use super::{DefinitionId, helper::PrimDef, *};
|
||||
use super::{helper::PrimDef, DefinitionId, *};
|
||||
use crate::{
|
||||
codegen::CodeGenContext,
|
||||
symbol_resolver::{SymbolResolver, ValueEnum},
|
||||
|
@ -43,7 +43,11 @@ impl TypeAnnotation {
|
||||
format!("{}{}", class_name, {
|
||||
let param_list =
|
||||
params.iter().map(|p| p.stringify(unifier)).collect_vec().join(", ");
|
||||
if param_list.is_empty() { String::new() } else { format!("[{param_list}]") }
|
||||
if param_list.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("[{param_list}]")
|
||||
}
|
||||
})
|
||||
}
|
||||
Literal(values) => {
|
||||
@ -60,182 +64,6 @@ impl TypeAnnotation {
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a [`DefinitionId`] representing a [`TopLevelDef::Class`] and its type arguments into a
|
||||
/// [`TypeAnnotation`].
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn class_def_id_to_type_annotation<T, S: std::hash::BuildHasher + Clone>(
|
||||
resolver: &(dyn SymbolResolver + Send + Sync),
|
||||
top_level_defs: &[Arc<RwLock<TopLevelDef>>],
|
||||
unifier: &mut Unifier,
|
||||
primitives: &PrimitiveStore,
|
||||
mut locked: HashMap<DefinitionId, Vec<Type>, S>,
|
||||
id: StrRef,
|
||||
(obj_id, type_args): (DefinitionId, Option<&Expr<T>>),
|
||||
location: &Location,
|
||||
) -> Result<TypeAnnotation, HashSet<String>> {
|
||||
let Some(top_level_def) = top_level_defs.get(obj_id.0) else {
|
||||
return Err(HashSet::from([format!(
|
||||
"NameError: name '{id}' is not defined (at {location})",
|
||||
)]));
|
||||
};
|
||||
|
||||
// We need to use `try_read` here, since the composer may be processing our class right now,
|
||||
// which requires exclusive access to modify the class internals.
|
||||
//
|
||||
// `locked` is guaranteed to hold a k-v pair of the composer-processing class, so fallback
|
||||
// to it if the `top_level_def` is already locked for mutation.
|
||||
let type_vars = if let Some(def_read) = top_level_def.try_read() {
|
||||
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
|
||||
type_vars.clone()
|
||||
} else {
|
||||
return Err(HashSet::from([format!(
|
||||
"function cannot be used as a type (at {location})",
|
||||
)]));
|
||||
}
|
||||
} else {
|
||||
locked.get(&obj_id).unwrap().clone()
|
||||
};
|
||||
|
||||
let param_type_infos = if let Some(slice) = type_args {
|
||||
// we do not check whether the application of type variables are compatible here
|
||||
let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
|
||||
elts.iter().collect_vec()
|
||||
} else {
|
||||
vec![slice]
|
||||
};
|
||||
|
||||
if type_vars.len() != params_ast.len() {
|
||||
return Err(HashSet::from([format!(
|
||||
"expect {} type parameters but got {} (at {})",
|
||||
type_vars.len(),
|
||||
params_ast.len(),
|
||||
params_ast[0].location,
|
||||
)]));
|
||||
}
|
||||
|
||||
let result = params_ast
|
||||
.iter()
|
||||
.map(|x| {
|
||||
parse_ast_to_type_annotation_kinds(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
x,
|
||||
{
|
||||
locked.insert(obj_id, type_vars.clone());
|
||||
locked.clone()
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
// make sure the result do not contain any type vars
|
||||
let no_type_var =
|
||||
result.iter().all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
|
||||
if no_type_var {
|
||||
result
|
||||
} else {
|
||||
return Err(HashSet::from([format!(
|
||||
"application of type vars to generic class is not currently supported (at {})",
|
||||
params_ast[0].location
|
||||
)]));
|
||||
}
|
||||
} else {
|
||||
// check param number here
|
||||
if !type_vars.is_empty() {
|
||||
return Err(HashSet::from([format!(
|
||||
"expect {} type variable parameter but got 0 (at {location})",
|
||||
type_vars.len(),
|
||||
)]));
|
||||
}
|
||||
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
|
||||
}
|
||||
|
||||
/// Parses the `id` of a [`ast::ExprKind::Name`] expression as a [`TypeAnnotation`].
|
||||
fn parse_name_as_type_annotation<T, S: std::hash::BuildHasher + Clone>(
|
||||
resolver: &(dyn SymbolResolver + Send + Sync),
|
||||
top_level_defs: &[Arc<RwLock<TopLevelDef>>],
|
||||
unifier: &mut Unifier,
|
||||
primitives: &PrimitiveStore,
|
||||
locked: HashMap<DefinitionId, Vec<Type>, S>,
|
||||
id: StrRef,
|
||||
location: &Location,
|
||||
) -> Result<TypeAnnotation, HashSet<String>> {
|
||||
if id == "int32".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.int32))
|
||||
} else if id == "int64".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.int64))
|
||||
} else if id == "uint32".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.uint32))
|
||||
} else if id == "uint64".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.uint64))
|
||||
} else if id == "float".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.float))
|
||||
} else if id == "bool".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.bool))
|
||||
} else if id == "str".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.str))
|
||||
} else if id == "Exception".into() {
|
||||
Ok(TypeAnnotation::CustomClass { id: PrimDef::Exception.id(), params: Vec::default() })
|
||||
} else if let Ok(obj_id) = resolver.get_identifier_def(id) {
|
||||
class_def_id_to_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
locked,
|
||||
id,
|
||||
(obj_id, None as Option<&Expr<T>>),
|
||||
location,
|
||||
)
|
||||
} else if let Ok(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, id) {
|
||||
if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
|
||||
let var = unifier.get_fresh_var(Some(id), Some(*location)).ty;
|
||||
unifier.unify(var, ty).unwrap();
|
||||
Ok(TypeAnnotation::TypeVar(ty))
|
||||
} else {
|
||||
Err(HashSet::from([format!("`{id}` is not a valid type annotation (at {location})",)]))
|
||||
}
|
||||
} else {
|
||||
Err(HashSet::from([format!("`{id}` is not a valid type annotation (at {location})",)]))
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses the `id` and generic arguments of a class as a [`TypeAnnotation`].
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn parse_class_id_as_type_annotation<T, S: std::hash::BuildHasher + Clone>(
|
||||
resolver: &(dyn SymbolResolver + Send + Sync),
|
||||
top_level_defs: &[Arc<RwLock<TopLevelDef>>],
|
||||
unifier: &mut Unifier,
|
||||
primitives: &PrimitiveStore,
|
||||
locked: HashMap<DefinitionId, Vec<Type>, S>,
|
||||
id: StrRef,
|
||||
slice: &Expr<T>,
|
||||
location: &Location,
|
||||
) -> Result<TypeAnnotation, HashSet<String>> {
|
||||
if ["virtual".into(), "Generic".into(), "tuple".into(), "Option".into()].contains(&id) {
|
||||
return Err(HashSet::from([format!("keywords cannot be class name (at {location})")]));
|
||||
}
|
||||
|
||||
let obj_id = resolver.get_identifier_def(id)?;
|
||||
|
||||
class_def_id_to_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
locked,
|
||||
id,
|
||||
(obj_id, Some(slice)),
|
||||
location,
|
||||
)
|
||||
}
|
||||
|
||||
/// Parses an AST expression `expr` into a [`TypeAnnotation`].
|
||||
///
|
||||
/// * `locked` - A [`HashMap`] containing the IDs of known definitions, mapped to a [`Vec`] of all
|
||||
@ -251,17 +79,154 @@ pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(
|
||||
// the key stores the type_var of this topleveldef::class, we only need this field here
|
||||
locked: HashMap<DefinitionId, Vec<Type>, S>,
|
||||
) -> Result<TypeAnnotation, HashSet<String>> {
|
||||
match &expr.node {
|
||||
ast::ExprKind::Name { id, .. } => parse_name_as_type_annotation::<T, S>(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
locked,
|
||||
*id,
|
||||
&expr.location,
|
||||
),
|
||||
let name_handle = |id: &StrRef,
|
||||
unifier: &mut Unifier,
|
||||
locked: HashMap<DefinitionId, Vec<Type>, S>| {
|
||||
if id == &"int32".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.int32))
|
||||
} else if id == &"int64".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.int64))
|
||||
} else if id == &"uint32".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.uint32))
|
||||
} else if id == &"uint64".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.uint64))
|
||||
} else if id == &"float".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.float))
|
||||
} else if id == &"bool".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.bool))
|
||||
} else if id == &"str".into() {
|
||||
Ok(TypeAnnotation::Primitive(primitives.str))
|
||||
} else if id == &"Exception".into() {
|
||||
Ok(TypeAnnotation::CustomClass { id: PrimDef::Exception.id(), params: Vec::default() })
|
||||
} else if let Ok(obj_id) = resolver.get_identifier_def(*id) {
|
||||
let type_vars = {
|
||||
let Some(top_level_def) = top_level_defs.get(obj_id.0) else {
|
||||
return Err(HashSet::from([format!(
|
||||
"NameError: name '{id}' is not defined (at {})",
|
||||
expr.location
|
||||
)]));
|
||||
};
|
||||
let def_read = top_level_def.try_read();
|
||||
if let Some(def_read) = def_read {
|
||||
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
|
||||
type_vars.clone()
|
||||
} else {
|
||||
return Err(HashSet::from([format!(
|
||||
"function cannot be used as a type (at {})",
|
||||
expr.location
|
||||
)]));
|
||||
}
|
||||
} else {
|
||||
locked.get(&obj_id).unwrap().clone()
|
||||
}
|
||||
};
|
||||
// check param number here
|
||||
if !type_vars.is_empty() {
|
||||
return Err(HashSet::from([format!(
|
||||
"expect {} type variable parameter but got 0 (at {})",
|
||||
type_vars.len(),
|
||||
expr.location,
|
||||
)]));
|
||||
}
|
||||
Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] })
|
||||
} else if let Ok(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id) {
|
||||
if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
|
||||
let var = unifier.get_fresh_var(Some(*id), Some(expr.location)).ty;
|
||||
unifier.unify(var, ty).unwrap();
|
||||
Ok(TypeAnnotation::TypeVar(ty))
|
||||
} else {
|
||||
Err(HashSet::from([format!(
|
||||
"`{}` is not a valid type annotation (at {})",
|
||||
id, expr.location
|
||||
)]))
|
||||
}
|
||||
} else {
|
||||
Err(HashSet::from([format!(
|
||||
"`{}` is not a valid type annotation (at {})",
|
||||
id, expr.location
|
||||
)]))
|
||||
}
|
||||
};
|
||||
|
||||
let class_name_handle =
|
||||
|id: &StrRef,
|
||||
slice: &ast::Expr<T>,
|
||||
unifier: &mut Unifier,
|
||||
mut locked: HashMap<DefinitionId, Vec<Type>, S>| {
|
||||
if ["virtual".into(), "Generic".into(), "tuple".into(), "Option".into()].contains(id) {
|
||||
return Err(HashSet::from([format!(
|
||||
"keywords cannot be class name (at {})",
|
||||
expr.location
|
||||
)]));
|
||||
}
|
||||
let obj_id = resolver.get_identifier_def(*id)?;
|
||||
let type_vars = {
|
||||
let Some(top_level_def) = top_level_defs.get(obj_id.0) else {
|
||||
return Err(HashSet::from([format!(
|
||||
"NameError: name '{id}' is not defined (at {})",
|
||||
expr.location
|
||||
)]));
|
||||
};
|
||||
let def_read = top_level_def.try_read();
|
||||
if let Some(def_read) = def_read {
|
||||
let TopLevelDef::Class { type_vars, .. } = &*def_read else {
|
||||
unreachable!("must be class here")
|
||||
};
|
||||
type_vars.clone()
|
||||
} else {
|
||||
locked.get(&obj_id).unwrap().clone()
|
||||
}
|
||||
};
|
||||
// we do not check whether the application of type variables are compatible here
|
||||
let param_type_infos = {
|
||||
let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
|
||||
elts.iter().collect_vec()
|
||||
} else {
|
||||
vec![slice]
|
||||
};
|
||||
if type_vars.len() != params_ast.len() {
|
||||
return Err(HashSet::from([format!(
|
||||
"expect {} type parameters but got {} (at {})",
|
||||
type_vars.len(),
|
||||
params_ast.len(),
|
||||
params_ast[0].location,
|
||||
)]));
|
||||
}
|
||||
let result = params_ast
|
||||
.iter()
|
||||
.map(|x| {
|
||||
parse_ast_to_type_annotation_kinds(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
x,
|
||||
{
|
||||
locked.insert(obj_id, type_vars.clone());
|
||||
locked.clone()
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
// make sure the result do not contain any type vars
|
||||
let no_type_var =
|
||||
result.iter().all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
|
||||
if no_type_var {
|
||||
result
|
||||
} else {
|
||||
return Err(HashSet::from([
|
||||
format!(
|
||||
"application of type vars to generic class is not currently supported (at {})",
|
||||
params_ast[0].location
|
||||
),
|
||||
]));
|
||||
}
|
||||
};
|
||||
Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
|
||||
};
|
||||
|
||||
match &expr.node {
|
||||
ast::ExprKind::Name { id, .. } => name_handle(id, unifier, locked),
|
||||
// virtual
|
||||
ast::ExprKind::Subscript { value, slice, .. }
|
||||
if {
|
||||
@ -382,105 +347,9 @@ pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(

// custom class
ast::ExprKind::Subscript { value, slice, .. } => {
match &value.node {
ast::ExprKind::Name { id, .. } => parse_class_id_as_type_annotation(
resolver,
top_level_defs,
unifier,
primitives,
locked,
*id,
slice,
&expr.location,
),

ast::ExprKind::Attribute { value, attr, .. } => {
if let ast::ExprKind::Name { id, .. } = &value.node {
let mod_id = resolver.get_identifier_def(*id)?;
let Some(mod_tld) = top_level_defs.get(mod_id.0) else {
return Err(HashSet::from([format!(
"NameError: name '{id}' is not defined (at {})",
expr.location
)]));
};

let matching_attr =
if let TopLevelDef::Module { methods, .. } = &*mod_tld.read() {
methods.get(attr).copied()
} else {
unreachable!("must be module here")
};

let Some(def_id) = matching_attr else {
return Err(HashSet::from([format!(
"AttributeError: module '{id}' has no attribute '{attr}' (at {})",
expr.location
)]));
};

class_def_id_to_type_annotation::<T, S>(
resolver,
top_level_defs,
unifier,
primitives,
locked,
*attr,
(def_id, Some(slice)),
&expr.location,
)
} else {
// TODO: Handle multiple indirection
Err(HashSet::from([format!(
"unsupported expression type for class name (at {})",
value.location
)]))
}
}

_ => Err(HashSet::from([format!(
"unsupported expression type for class name (at {})",
value.location
)])),
}
}

ast::ExprKind::Constant { value, .. } => Ok(TypeAnnotation::Literal(vec![value.clone()])),

ast::ExprKind::Attribute { value, attr, .. } => {
if let ast::ExprKind::Name { id, .. } = &value.node {
let mod_id = resolver.get_identifier_def(*id)?;
let Some(mod_tld) = top_level_defs.get(mod_id.0) else {
return Err(HashSet::from([format!(
"NameError: name '{id}' is not defined (at {})",
expr.location
)]));
};

let matching_attr = if let TopLevelDef::Module { methods, .. } = &*mod_tld.read() {
methods.get(attr).copied()
} else {
unreachable!("must be module here")
};

let Some(def_id) = matching_attr else {
return Err(HashSet::from([format!(
"AttributeError: module '{id}' has no attribute '{attr}' (at {})",
expr.location
)]));
};

class_def_id_to_type_annotation::<T, S>(
resolver,
top_level_defs,
unifier,
primitives,
locked,
*attr,
(def_id, None),
&expr.location,
)
class_name_handle(id, slice, unifier, locked)
} else {
// TODO: Handle multiple indirection
Err(HashSet::from([format!(
"unsupported expression type for class name (at {})",
value.location
@ -488,6 +357,8 @@ pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(
}
}

ast::ExprKind::Constant { value, .. } => Ok(TypeAnnotation::Literal(vec![value.clone()])),

_ => Err(HashSet::from([format!(
"unsupported expression for type annotation (at {})",
expr.location

@ -139,7 +139,7 @@ impl Inferencer<'_> {
return Err(HashSet::from([format!(
"type error at identifier `{}` ({}) at {}",
id, e, expr.location
)]));
)]))
}
}
}
@ -376,11 +376,13 @@ impl Inferencer<'_> {
}

if !self.check_return_value_ty(ret_ty) {
return Err(HashSet::from([format!(
"return value of type {} must be a primitive or a tuple of primitives at {}",
self.unifier.stringify(ret_ty),
value.location,
)]));
return Err(HashSet::from([
format!(
"return value of type {} must be a primitive or a tuple of primitives at {}",
self.unifier.stringify(ret_ty),
value.location,
),
]));
}
}
}
@ -423,7 +425,7 @@ impl Inferencer<'_> {
return Err(HashSet::from([format!(
"type error at identifier `{}` ({}) at {}",
id, e, stmt.location
)]));
)]))
}
}
}

@ -1,18 +1,18 @@
use std::{cmp::max, collections::HashMap, rc::Rc};

use itertools::{Itertools, iproduct};
use itertools::{iproduct, Itertools};
use strum::IntoEnumIterator;

use nac3parser::ast::{Cmpop, Operator, StrRef, Unaryop};

use super::{
type_inferencer::*,
typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier, VarMap, into_var_map},
typedef::{into_var_map, FunSignature, FuncArg, Type, TypeEnum, Unifier, VarMap},
};
use crate::{
symbol_resolver::SymbolValue,
toplevel::{
helper::{PrimDef, extract_ndims},
helper::{extract_ndims, PrimDef},
numpy::{make_ndarray_ty, unpack_ndarray_var_tys},
},
};
@ -498,7 +498,11 @@ pub fn typeof_binop(
));
}

if is_left_list { lhs } else { rhs }
if is_left_list {
lhs
} else {
rhs
}
} else if is_left_ndarray || is_right_ndarray {
typeof_ndarray_broadcast(unifier, primitives, lhs, rhs)?
} else if unifier.unioned(lhs, rhs) {
@ -522,9 +526,7 @@ pub fn typeof_binop(
_ => {
let lhs_str = unifier.stringify(lhs);
let rhs_str = unifier.stringify(rhs);
return Err(format!(
"ndarray.__matmul__ only accepts ndarray operands, but left operand has type {lhs_str}, and right operand has type {rhs_str}"
));
return Err(format!("ndarray.__matmul__ only accepts ndarray operands, but left operand has type {lhs_str}, and right operand has type {rhs_str}"));
}
}

@ -550,7 +552,7 @@ pub fn typeof_binop(
(0, _) | (_, 0) => {
return Err(
"ndarray.__matmul__ does not allow unsized ndarray input".to_string()
);
)
}
(1, 1) => 0,
(1, _) => rhs_ndims - 1,

@ -108,10 +108,7 @@ impl Display for DisplayTypeError<'_> {
let expected_count = expected_min_count; // or expected_max_count
write!(f, "Too many arguments. Expected {expected_count} but got {got_count}")
} else {
write!(
f,
"Too many arguments. Expected {expected_min_count} to {expected_max_count} arguments but got {got_count}"
)
write!(f, "Too many arguments. Expected {expected_min_count} to {expected_max_count} arguments but got {got_count}")
}
}
MissingArgs { missing_arg_names } => {
@ -126,10 +123,7 @@ impl Display for DisplayTypeError<'_> {
let expected_rhs_type_str =
self.unifier.stringify_with_notes(*expected_rhs_type, &mut notes);

write!(
f,
"Unsupported operand type(s) for {op_symbol}: '{lhs_type_str}' and '{rhs_type_str}' (right operand should have type {expected_rhs_type_str})"
)
write!(f, "Unsupported operand type(s) for {op_symbol}: '{lhs_type_str}' and '{rhs_type_str}' (right operand should have type {expected_rhs_type_str})")
}
UnsupportedComparsionOpTypes { operator, lhs_type, rhs_type, expected_rhs_type } => {
let op_symbol = operator.op_info().symbol;
@ -139,10 +133,7 @@ impl Display for DisplayTypeError<'_> {
let expected_rhs_type_str =
self.unifier.stringify_with_notes(*expected_rhs_type, &mut notes);

write!(
f,
"'{op_symbol}' not supported between instances of '{lhs_type_str}' and '{rhs_type_str}' (right operand should have type {expected_rhs_type_str})"
)
write!(f, "'{op_symbol}' not supported between instances of '{lhs_type_str}' and '{rhs_type_str}' (right operand should have type {expected_rhs_type_str})")
}
UnknownArgName(name) => {
write!(f, "Unknown argument name: {name}")
@ -150,10 +141,7 @@ impl Display for DisplayTypeError<'_> {
IncorrectArgType { name, expected, got } => {
let expected = self.unifier.stringify_with_notes(*expected, &mut notes);
let got = self.unifier.stringify_with_notes(*got, &mut notes);
write!(
f,
"Incorrect argument type for parameter {name}. Expected {expected}, but got {got}"
)
write!(f, "Incorrect argument type for parameter {name}. Expected {expected}, but got {got}")
}
FieldUnificationError { field, types, loc } => {
let lhs = self.unifier.stringify_with_notes(types.0, &mut notes);

@ -7,28 +7,29 @@ use std::{
sync::Arc,
};

use itertools::{Itertools, izip};
use itertools::{izip, Itertools};

use nac3parser::ast::{
self, Arguments, Comprehension, ExprContext, ExprKind, Ident, Located, Location, StrRef,
self,
fold::{self, Fold},
Arguments, Comprehension, ExprContext, ExprKind, Ident, Located, Location, StrRef,
};

use super::{
magic_methods::*,
type_error::{TypeError, TypeErrorKind},
typedef::{
Call, CallId, FunSignature, FuncArg, Mapping, OperatorInfo, RecordField, RecordKey, Type,
TypeEnum, TypeVar, Unifier, VarMap, into_var_map, iter_type_vars,
into_var_map, iter_type_vars, Call, CallId, FunSignature, FuncArg, Mapping, OperatorInfo,
RecordField, RecordKey, Type, TypeEnum, TypeVar, Unifier, VarMap,
},
};
use crate::{
symbol_resolver::{SymbolResolver, SymbolValue},
toplevel::{
TopLevelContext, TopLevelDef,
helper::{PrimDef, arraylike_flatten_element_type, arraylike_get_ndims},
helper::{arraylike_flatten_element_type, arraylike_get_ndims, PrimDef},
numpy::{make_ndarray_ty, unpack_ndarray_var_tys},
type_annotation::TypeAnnotation,
TopLevelContext, TopLevelDef,
},
};

@ -1017,11 +1018,13 @@ impl Inferencer<'_> {
// This means the user is passing an expression of type `List`,
// but it is done so indirectly (like putting a variable referencing a `List`)
// rather than writing a List literal. We need to report an error.
return Err(HashSet::from([format!(
"Expected list literal, tuple, or int32 for argument {arg_num} of {id} at {location}. Input argument is of type list but not a list literal.",
arg_num = arg_index + 1,
location = shape.location
)]));
return Err(HashSet::from([
format!(
"Expected list literal, tuple, or int32 for argument {arg_num} of {id} at {location}. Input argument is of type list but not a list literal.",
arg_num = arg_index + 1,
location = shape.location
)
]));
}
}
TypeEnum::TTuple { ty: tuple_element_types, .. } => {
@ -1140,7 +1143,7 @@ impl Inferencer<'_> {
)
.as_str(),
obj.location,
);
)
}
}

@ -2274,7 +2277,7 @@ impl Inferencer<'_> {
targets.len() - 1,
rhs_tys.len()
),
*target_list_location,
*target_list_location
);
}

@ -2332,7 +2335,7 @@ impl Inferencer<'_> {
targets.len() - 1,
rhs_tys.len()
),
*target_list_location,
*target_list_location
);
}

@ -2559,9 +2562,7 @@ impl Inferencer<'_> {

if new_ndims_values.iter().any(|v| *v == 0) {
// TODO: Difficult to implement since now the return may both be a scalar type, or an ndarray type.
unimplemented!(
"Inference for ndarray subscript operator with Literal[0, ...] bound unimplemented"
)
unimplemented!("Inference for ndarray subscript operator with Literal[0, ...] bound unimplemented")
}

let new_ndarray_ty =

@ -11,7 +11,7 @@ use super::*;
use crate::{
codegen::{CodeGenContext, CodeGenerator},
symbol_resolver::ValueEnum,
toplevel::{DefinitionId, TopLevelDef, helper::PrimDef},
toplevel::{helper::PrimDef, DefinitionId, TopLevelDef},
typecheck::{magic_methods::with_fields, typedef::*},
};

@ -21,7 +21,7 @@ use super::{
};
use crate::{
symbol_resolver::SymbolValue,
toplevel::{DefinitionId, TopLevelContext, TopLevelDef, helper::PrimDef},
toplevel::{helper::PrimDef, DefinitionId, TopLevelContext, TopLevelDef},
};

#[cfg(test)]
@ -410,7 +410,11 @@ impl Unifier {

pub fn get_call_signature(&mut self, id: CallId) -> Option<FunSignature> {
let fun = self.calls.get(id.0).unwrap().fun.borrow().unwrap();
if let TypeEnum::TFunc(sign) = &*self.get_ty(fun) { Some(sign.clone()) } else { None }
if let TypeEnum::TFunc(sign) = &*self.get_ty(fun) {
Some(sign.clone())
} else {
None
}
}

#[must_use]
@ -1220,7 +1224,7 @@ impl Unifier {
return Err(TypeError::new(
TypeErrorKind::NoSuchField(*k, b),
field.loc,
));
))
}
}
}

@ -357,7 +357,7 @@ fn test_invalid_unification(
pairs.push((t1, t2));
}
let (t1, t2) =
(env.parse(erroneous_pair.0.0, &mapping), env.parse(erroneous_pair.0.1, &mapping));
(env.parse(erroneous_pair.0 .0, &mapping), env.parse(erroneous_pair.0 .1, &mapping));
for (a, b) in pairs {
env.unifier.unify(a, b).unwrap();
}

@ -121,7 +121,7 @@ impl<V> UnificationTable<V> {
let (log_len, generation) = snapshot;
assert!(self.log.len() >= log_len, "snapshot restoration error");
assert!(
matches!(self.log[log_len - 1], Action::Marker { generation: r#gen } if r#gen == generation),
matches!(self.log[log_len - 1], Action::Marker { generation: gen } if gen == generation),
"snapshot restoration error"
);
for action in self.log.drain(log_len - 1..).rev() {
@ -144,7 +144,7 @@ impl<V> UnificationTable<V> {
let (log_len, generation) = snapshot;
assert!(self.log.len() >= log_len, "snapshot discard error");
assert!(
matches!(self.log[log_len - 1], Action::Marker { generation: r#gen } if r#gen == generation),
matches!(self.log[log_len - 1], Action::Marker { generation: gen } if gen == generation),
"snapshot discard error"
);
self.log.clear();

@ -2,7 +2,7 @@
name = "nac3ld"
version = "0.1.0"
authors = ["M-Labs"]
edition = "2024"
edition = "2021"

[dependencies]
byteorder = { version = "1.5", default-features = false }

Some files were not shown because too many files have changed in this diff.