forked from M-Labs/nac3
Compare commits
6 Commits
master...escape-ana
Author | SHA1 | Date
---|---|---
pca006132 | 830fc0f453 |
pca006132 | ea5a8c95dc |
pca006132 | fb79b47b38 |
pca006132 | 1257a80ed1 |
pca006132 | c93f79f94a |
pca006132 | 10c4544553 |
@@ -50,9 +50,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
 [[package]]
 name = "bit-set"
-version = "0.5.3"
+version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
+checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de"
 dependencies = [
  "bit-vec",
 ]
@@ -89,9 +89,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "console"
-version = "0.15.1"
+version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89eab4d20ce20cea182308bca13088fecea9c05f6776cf287205d41a0ed3c847"
+checksum = "a28b32d32ca44b70c3e4acd7db1babf555fa026e385fb95f18028f88848b3c31"
 dependencies = [
  "encode_unicode",
  "libc",
@@ -102,9 +102,9 @@ dependencies = [
 
 [[package]]
 name = "crossbeam"
-version = "0.8.2"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c"
+checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845"
 dependencies = [
  "cfg-if",
  "crossbeam-channel",
@@ -116,9 +116,9 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-channel"
-version = "0.5.6"
+version = "0.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
+checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53"
 dependencies = [
  "cfg-if",
  "crossbeam-utils",
@@ -126,9 +126,9 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-deque"
-version = "0.8.2"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
+checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
 dependencies = [
  "cfg-if",
  "crossbeam-epoch",
@@ -137,23 +137,23 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-epoch"
-version = "0.9.10"
+version = "0.9.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1"
+checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c"
 dependencies = [
  "autocfg",
  "cfg-if",
  "crossbeam-utils",
+ "lazy_static",
  "memoffset",
- "once_cell",
  "scopeguard",
 ]
 
 [[package]]
 name = "crossbeam-queue"
-version = "0.3.6"
+version = "0.3.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1cd42583b04998a5363558e5f9291ee5a5ff6b49944332103f251e7479a82aa7"
+checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2"
 dependencies = [
  "cfg-if",
  "crossbeam-utils",
@@ -161,12 +161,12 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-utils"
-version = "0.8.11"
+version = "0.8.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc"
+checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38"
 dependencies = [
  "cfg-if",
- "once_cell",
+ "lazy_static",
 ]
 
 [[package]]
@@ -177,9 +177,9 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
 
 [[package]]
 name = "diff"
-version = "0.1.13"
+version = "0.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499"
 
 [[package]]
 name = "dirs-next"
@@ -204,9 +204,9 @@ dependencies = [
 
 [[package]]
 name = "either"
-version = "1.7.0"
+version = "1.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be"
+checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
 
 [[package]]
 name = "ena"
@@ -225,18 +225,18 @@ checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
 
 [[package]]
 name = "fastrand"
-version = "1.8.0"
+version = "1.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
+checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf"
 dependencies = [
  "instant",
 ]
 
 [[package]]
 name = "fixedbitset"
-version = "0.4.2"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
+checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
 
 [[package]]
 name = "fxhash"
@@ -249,9 +249,9 @@ dependencies = [
 
 [[package]]
 name = "getrandom"
-version = "0.2.7"
+version = "0.2.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6"
+checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
 dependencies = [
  "cfg-if",
  "libc",
@@ -267,12 +267,6 @@ dependencies = [
  "ahash",
 ]
 
-[[package]]
-name = "hashbrown"
-version = "0.12.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
-
 [[package]]
 name = "hermit-abi"
 version = "0.1.19"
@@ -284,37 +278,66 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "1.9.1"
+version = "1.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
+checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee"
 dependencies = [
  "autocfg",
- "hashbrown 0.12.3",
+ "hashbrown",
 ]
 
 [[package]]
 name = "indoc"
-version = "1.0.7"
+version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"
+checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8"
+dependencies = [
+ "indoc-impl",
+ "proc-macro-hack",
+]
+
+[[package]]
+name = "indoc"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7906a9fababaeacb774f72410e497a1d18de916322e33797bb2cd29baa23c9e"
+dependencies = [
+ "unindent",
+]
+
+[[package]]
+name = "indoc-impl"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0"
+dependencies = [
+ "proc-macro-hack",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unindent",
+]
 
 [[package]]
 name = "inkwell"
-version = "0.1.0"
-source = "git+https://github.com/TheDan64/inkwell.git#25b9fc5870370211504e874e7c81dc53573bca79"
+version = "0.1.0-beta.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2223d0eba0ae6d40a3e4680c6a3209143471e1f38b41746ea309aa36dde9f90b"
 dependencies = [
  "either",
  "inkwell_internals",
  "libc",
  "llvm-sys",
  "once_cell",
- "parking_lot",
+ "parking_lot 0.11.2",
  "regex",
 ]
 
 [[package]]
 name = "inkwell_internals"
 version = "0.5.0"
-source = "git+https://github.com/TheDan64/inkwell.git#25b9fc5870370211504e874e7c81dc53573bca79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c7090af3d300424caa81976b8c97bca41cd70e861272c072e188ae082fb49f9"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -355,15 +378,15 @@ dependencies = [
 
 [[package]]
 name = "itoa"
-version = "1.0.3"
+version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
 
 [[package]]
 name = "lalrpop"
-version = "0.19.8"
+version = "0.19.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b30455341b0e18f276fa64540aff54deafb54c589de6aca68659c63dd2d5d823"
+checksum = "852b75a095da6b69da8c5557731c3afd06525d4f655a4fc1c799e2ec8bc4dce4"
 dependencies = [
  "ascii-canvas",
  "atty",
@@ -384,9 +407,9 @@ dependencies = [
 
 [[package]]
 name = "lalrpop-util"
-version = "0.19.8"
+version = "0.19.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bcf796c978e9b4d983414f4caedc9273aa33ee214c5b887bd55fde84c85d2dc4"
+checksum = "d6d265705249fe209280676d8f68887859fa42e1d34f342fc05bd47726a5e188"
 dependencies = [
  "regex",
 ]
@@ -399,9 +422,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
 [[package]]
 name = "libc"
-version = "0.2.127"
+version = "0.2.121"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "505e71a4706fa491e9b1b55f51b95d4037d0821ee40131190475f692b35b009b"
+checksum = "efaa7b300f3b5fe8eb6bf21ce3895e1751d9665086af2d64b42f19701015ff4f"
 
 [[package]]
 name = "libloading"
@@ -415,15 +438,15 @@ dependencies = [
 
 [[package]]
 name = "linked-hash-map"
-version = "0.5.6"
+version = "0.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
+checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
 
 [[package]]
 name = "llvm-sys"
-version = "140.0.2"
+version = "130.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a4ad24a72823cb06764e04a008d4aedc1be56d2be87aa4212b12a3e9f77bb10"
+checksum = "95eb03b4f7ae21f48ef7c565a3e3aa22c50616aea64645fb1fd7f6f56b51c274"
 dependencies = [
  "cc",
  "lazy_static",
@@ -444,18 +467,18 @@ dependencies = [
 
 [[package]]
 name = "log"
-version = "0.4.17"
+version = "0.4.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+checksum = "6389c490849ff5bc16be905ae24bc913a9c8892e19b2341dbc175e14c341c2b8"
 dependencies = [
  "cfg-if",
 ]
 
 [[package]]
 name = "memchr"
-version = "2.5.0"
+version = "2.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
 
 [[package]]
 name = "memoffset"
@@ -472,9 +495,8 @@ version = "0.1.0"
 dependencies = [
  "inkwell",
  "nac3core",
- "nac3ld",
  "nac3parser",
- "parking_lot",
+ "parking_lot 0.11.2",
  "pyo3",
  "tempfile",
 ]
@@ -485,7 +507,7 @@ version = "0.1.0"
 dependencies = [
  "fxhash",
  "lazy_static",
- "parking_lot",
+ "parking_lot 0.11.2",
  "string-interner",
 ]
 
@@ -494,25 +516,18 @@ name = "nac3core"
 version = "0.1.0"
 dependencies = [
  "crossbeam",
- "indoc",
+ "indoc 1.0.4",
  "inkwell",
  "insta",
  "itertools",
  "lazy_static",
  "nac3parser",
- "parking_lot",
+ "parking_lot 0.11.2",
  "rayon",
  "regex",
+ "slab",
  "test-case",
 ]
 
-[[package]]
-name = "nac3ld"
-version = "0.1.0"
-dependencies = [
- "byteorder",
-]
-
 [[package]]
 name = "nac3parser"
 version = "0.1.2"
@@ -536,7 +551,7 @@ dependencies = [
  "inkwell",
  "nac3core",
  "nac3parser",
- "parking_lot",
+ "parking_lot 0.11.2",
 ]
 
 [[package]]
@@ -557,25 +572,50 @@ dependencies = [
 
 [[package]]
 name = "once_cell"
-version = "1.13.0"
+version = "1.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
+checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
 
 [[package]]
 name = "parking_lot"
-version = "0.12.1"
+version = "0.11.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
 dependencies = [
+ "instant",
  "lock_api",
- "parking_lot_core",
+ "parking_lot_core 0.8.5",
+]
+
+[[package]]
+name = "parking_lot"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58"
+dependencies = [
+ "lock_api",
+ "parking_lot_core 0.9.2",
 ]
 
 [[package]]
 name = "parking_lot_core"
-version = "0.9.3"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
+checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
 dependencies = [
  "cfg-if",
+ "instant",
  "libc",
  "redox_syscall",
  "smallvec",
+ "winapi",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "995f667a6c822200b0433ac218e05582f0e2efa1b922a3fd2fbaadc5f87bab37"
+dependencies = [
+ "cfg-if",
+ "libc",
@@ -585,10 +625,38 @@ dependencies = [
 ]
 
 [[package]]
-name = "petgraph"
-version = "0.6.2"
+name = "paste"
+version = "0.1.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143"
+checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880"
+dependencies = [
+ "paste-impl",
+ "proc-macro-hack",
+]
+
+[[package]]
+name = "paste-impl"
+version = "0.1.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6"
+dependencies = [
+ "proc-macro-hack",
+]
+
+[[package]]
+name = "pest"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
+dependencies = [
+ "ucd-trie",
+]
+
+[[package]]
+name = "petgraph"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
 dependencies = [
  "fixedbitset",
  "indexmap",
@@ -596,32 +664,34 @@ dependencies = [
 
 [[package]]
 name = "phf"
-version = "0.11.0"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4724fa946c8d1e7cd881bd3dbee63ce32fc1e9e191e35786b3dc1320a3f68131"
+checksum = "b2ac8b67553a7ca9457ce0e526948cad581819238f4a9d1ea74545851fa24f37"
 dependencies = [
  "phf_macros",
- "phf_shared 0.11.0",
+ "phf_shared 0.9.0",
+ "proc-macro-hack",
 ]
 
 [[package]]
 name = "phf_generator"
-version = "0.11.0"
+version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b450720b6f75cfbfabc195814bd3765f337a4f9a83186f8537297cac12f6705"
+checksum = "d43f3220d96e0080cc9ea234978ccd80d904eafb17be31bb0f76daaea6493082"
 dependencies = [
- "phf_shared 0.11.0",
+ "phf_shared 0.9.0",
  "rand",
 ]
 
 [[package]]
 name = "phf_macros"
-version = "0.11.0"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd94351ac44e70e56b59883e15029a5135f902a8a3020f9c18d580a420e526aa"
+checksum = "b706f5936eb50ed880ae3009395b43ed19db5bff2ebd459c95e7bf013a89ab86"
 dependencies = [
  "phf_generator",
- "phf_shared 0.11.0",
+ "phf_shared 0.9.0",
+ "proc-macro-hack",
  "proc-macro2",
  "quote",
  "syn",
@@ -629,18 +699,18 @@ dependencies = [
 
 [[package]]
 name = "phf_shared"
-version = "0.10.0"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
+checksum = "a68318426de33640f02be62b4ae8eb1261be2efbc337b60c54d845bf4484e0d9"
 dependencies = [
  "siphasher",
 ]
 
 [[package]]
 name = "phf_shared"
-version = "0.11.0"
+version = "0.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9dd5609d4b2df87167f908a32e1b146ce309c16cf35df76bc11f440b756048e4"
+checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
 dependencies = [
  "siphasher",
 ]
@@ -664,57 +734,51 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
 
 [[package]]
-name = "proc-macro2"
-version = "1.0.43"
+name = "proc-macro-hack"
+version = "0.5.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
+checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
 dependencies = [
- "unicode-ident",
+ "unicode-xid",
 ]
 
 [[package]]
 name = "pyo3"
-version = "0.16.5"
+version = "0.14.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e6302e85060011447471887705bb7838f14aba43fcb06957d823739a496b3dc"
+checksum = "35100f9347670a566a67aa623369293703322bb9db77d99d7df7313b575ae0c8"
 dependencies = [
  "cfg-if",
- "indoc",
+ "indoc 0.3.6",
  "libc",
- "parking_lot",
+ "parking_lot 0.11.2",
+ "paste",
  "pyo3-build-config",
- "pyo3-ffi",
  "pyo3-macros",
  "unindent",
 ]
 
 [[package]]
 name = "pyo3-build-config"
-version = "0.16.5"
+version = "0.14.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5b65b546c35d8a3b1b2f0ddbac7c6a569d759f357f2b9df884f5d6b719152c8"
+checksum = "d12961738cacbd7f91b7c43bc25cfeeaa2698ad07a04b3be0aa88b950865738f"
 dependencies = [
  "once_cell",
- "target-lexicon",
 ]
 
-[[package]]
-name = "pyo3-ffi"
-version = "0.16.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c275a07127c1aca33031a563e384ffdd485aee34ef131116fcd58e3430d1742b"
-dependencies = [
- "libc",
- "pyo3-build-config",
-]
-
 [[package]]
 name = "pyo3-macros"
-version = "0.16.5"
+version = "0.14.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "284fc4485bfbcc9850a6d661d627783f18d19c2ab55880b021671c4ba83e90f7"
+checksum = "fc0bc5215d704824dfddddc03f93cb572e1155c68b6761c37005e1c288808ea8"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
  "quote",
  "syn",
@@ -722,20 +786,21 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros-backend"
-version = "0.16.5"
+version = "0.14.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "53bda0f58f73f5c5429693c96ed57f7abdb38fdfc28ae06da4101a257adb7faf"
+checksum = "71623fc593224afaab918aa3afcaf86ed2f43d34f6afde7f3922608f253240df"
 dependencies = [
  "proc-macro2",
+ "pyo3-build-config",
  "quote",
  "syn",
 ]
 
 [[package]]
 name = "quote"
-version = "1.0.21"
+version = "1.0.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "632d02bff7f874a36f33ea8bb416cd484b90cc66c1194b1a1110d067a7013f58"
 dependencies = [
  "proc-macro2",
 ]
@@ -772,9 +837,9 @@ dependencies = [
 
 [[package]]
 name = "rayon"
-version = "1.5.3"
+version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
 dependencies = [
  "autocfg",
  "crossbeam-deque",
@@ -784,21 +849,22 @@ dependencies = [
 
 [[package]]
 name = "rayon-core"
-version = "1.9.3"
+version = "1.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
 dependencies = [
  "crossbeam-channel",
  "crossbeam-deque",
  "crossbeam-utils",
+ "lazy_static",
  "num_cpus",
 ]
 
 [[package]]
 name = "redox_syscall"
-version = "0.2.16"
+version = "0.2.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
+checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42"
 dependencies = [
  "bitflags",
 ]
@@ -816,9 +882,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.6.0"
+version = "1.5.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
+checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -827,9 +893,9 @@ dependencies = [
 
 [[package]]
 name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
 
 [[package]]
 name = "remove_dir_all"
@@ -849,15 +915,15 @@ dependencies = [
 
 [[package]]
 name = "rustversion"
-version = "1.0.9"
+version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8"
+checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
 
 [[package]]
 name = "ryu"
-version = "1.0.11"
+version = "1.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
 
 [[package]]
 name = "scopeguard"
|
|||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "1.0.13"
|
||||
version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "93f6841e709003d68bb2deee8c343572bf446003ec20a583e76f7b15cebf3711"
|
||||
checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6"
|
||||
dependencies = [
|
||||
"semver-parser",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semver-parser"
|
||||
version = "0.10.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7"
|
||||
dependencies = [
|
||||
"pest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.142"
|
||||
version = "1.0.136"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e590c437916fb6b221e1d00df6e3294f3fccd70ca7e92541c475d6ed6ef5fee2"
|
||||
checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.142"
|
||||
version = "1.0.136"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34b5b8d809babe02f538c2cfec6f2c1ed10804c0e5a6a041a049a4f5588ccc2e"
|
||||
checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -893,9 +971,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.83"
|
||||
version = "1.0.79"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "38dd04e3c8279e75b31ef29dbdceebfe5ad89f4d0937213c53f7d49d01b3d5a7"
|
||||
checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"ryu",
|
||||
|
@ -904,9 +982,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_yaml"
|
||||
version = "0.8.26"
|
||||
version = "0.8.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b"
|
||||
checksum = "a4a521f2940385c165a24ee286aa8599633d162077a54bdcae2a6fd5a7bfa7a0"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"ryu",
|
||||
|
@ -916,9 +994,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "similar"
|
||||
version = "2.2.0"
|
||||
version = "2.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "62ac7f900db32bf3fd12e0117dd3dc4da74bc52ebaac97f39668446d89694803"
|
||||
checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3"
|
||||
|
||||
[[package]]
|
||||
name = "siphasher"
|
||||
|
@ -927,19 +1005,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de"
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.9.0"
|
||||
name = "slab"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
|
||||
checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32"
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
|
||||
|
||||
[[package]]
|
||||
name = "string-interner"
|
||||
version = "0.14.0"
|
||||
version = "0.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91e2531d8525b29b514d25e275a43581320d587b86db302b9a7e464bac579648"
|
||||
checksum = "ecc77d3a5728ef82235df1f9b9430507f555c7404797f42b49c2403d4c1d8c6c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"hashbrown 0.11.2",
|
||||
"hashbrown",
|
||||
"serde",
|
||||
]
|
||||
|
||||
|
@@ -951,28 +1035,22 @@ checksum = "213494b7a2b503146286049378ce02b482200519accc31872ee8be91fa820a08"
 dependencies = [
  "new_debug_unreachable",
  "once_cell",
- "parking_lot",
+ "parking_lot 0.12.0",
  "phf_shared 0.10.0",
  "precomputed-hash",
 ]
 
 [[package]]
 name = "syn"
-version = "1.0.99"
+version = "1.0.90"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
+checksum = "704df27628939572cd88d33f171cd6f896f4eaca85252c6e0a72d8d8287ee86f"
 dependencies = [
  "proc-macro2",
  "quote",
- "unicode-ident",
+ "unicode-xid",
 ]
 
-[[package]]
-name = "target-lexicon"
-version = "0.12.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1"
-
 [[package]]
 name = "tempfile"
 version = "3.3.0"
@@ -1023,18 +1101,18 @@ dependencies = [
 
 [[package]]
 name = "thiserror"
-version = "1.0.32"
+version = "1.0.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f5f6586b7f764adc0231f4c79be7b920e766bb2f3e51b3661cdb263828f19994"
+checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417"
 dependencies = [
  "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.32"
+version = "1.0.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "12bafc5b54507e0149cdf1b145a5d80ab80a90bcd9275df43d4fff68460f6c21"
+checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1050,6 +1128,12 @@ dependencies = [
  "crunchy",
 ]
 
+[[package]]
+name = "ucd-trie"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
+
 [[package]]
 name = "unic-char-property"
 version = "0.9.0"
@@ -1102,29 +1186,23 @@ dependencies = [
  "unic-common",
 ]
 
-[[package]]
-name = "unicode-ident"
-version = "1.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf"
-
 [[package]]
 name = "unicode-xid"
-version = "0.2.3"
+version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04"
+checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
 
 [[package]]
 name = "unicode_names2"
-version = "0.5.0"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eec8e807a365e5c972debc47b8f06d361b37b94cfd18d48f7adc715fb86404dd"
+checksum = "87d6678d7916394abad0d4b19df4d3802e1fd84abd7d701f39b75ee71b9e8cf1"
 
 [[package]]
 name = "unindent"
-version = "0.1.10"
+version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58ee9362deb4a96cef4d437d1ad49cffc9b9e92d202b6995674e928ce684f112"
+checksum = "514672a55d7380da379785a4d70ca8386c8883ff7eaae877be4d2081cebe73d8"
 
 [[package]]
 name = "version_check"
@@ -1134,9 +1212,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
 [[package]]
 name = "wasi"
-version = "0.11.0+wasi-snapshot-preview1"
+version = "0.10.2+wasi-snapshot-preview1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
 
 [[package]]
 name = "winapi"
@@ -1162,9 +1240,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
 name = "windows-sys"
-version = "0.36.1"
+version = "0.34.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
+checksum = "5acdd78cb4ba54c0045ac14f62d8f94a03d10047904ae2a40afa1e99d8f70825"
 dependencies = [
  "windows_aarch64_msvc",
  "windows_i686_gnu",
@@ -1175,33 +1253,33 @@ dependencies = [
 
 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.36.1"
+version = "0.34.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
+checksum = "17cffbe740121affb56fad0fc0e421804adf0ae00891205213b5cecd30db881d"
 
 [[package]]
 name = "windows_i686_gnu"
-version = "0.36.1"
+version = "0.34.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
+checksum = "2564fde759adb79129d9b4f54be42b32c89970c18ebf93124ca8870a498688ed"
 
 [[package]]
 name = "windows_i686_msvc"
-version = "0.36.1"
+version = "0.34.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
+checksum = "9cd9d32ba70453522332c14d38814bceeb747d80b3958676007acadd7e166956"
 
 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.36.1"
+version = "0.34.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
+checksum = "cfce6deae227ee8d356d19effc141a509cc503dfd1f850622ec4b0f84428e1f4"
 
 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.36.1"
+version = "0.34.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
+checksum = "d19538ccc21819d01deaf88d6a17eae6596a12e9aafdbb97916fb49896d89de9"
 
 [[package]]
 name = "yaml-rust"
@@ -1,6 +1,5 @@
 [workspace]
 members = [
-    "nac3ld",
     "nac3ast",
     "nac3parser",
    "nac3core",
47 README.md
@@ -1,10 +1,5 @@
-<div align="center">
-
-![icon](https://git.m-labs.hk/M-Labs/nac3/raw/branch/master/nac3.svg)
-
-</div>
-
 # NAC3
 
 NAC3 is a major, backward-incompatible rewrite of the compiler for the [ARTIQ](https://m-labs.hk/artiq) physics experiment control and data acquisition system. It features greatly improved compilation speeds, a much better type system, and more predictable and transparent operation.
 
 NAC3 has a modular design and its applicability reaches beyond ARTIQ. The ``nac3core`` module does not contain anything specific to ARTIQ, and can be used in any project that requires compiling Python to machine code.
@@ -13,7 +8,7 @@ NAC3 has a modular design and its applicability reaches beyond ARTIQ. The ``nac3
 
 ## Packaging
 
-NAC3 is packaged using the [Nix](https://nixos.org) Flakes system. Install Nix 2.8+ and enable flakes by adding ``experimental-features = nix-command flakes`` to ``nix.conf`` (e.g. ``~/.config/nix/nix.conf``).
+NAC3 is packaged using the [Nix](https://nixos.org) Flakes system. Install Nix 2.4+ and enable flakes by adding ``experimental-features = nix-command flakes`` to ``nix.conf`` (e.g. ``~/.config/nix/nix.conf``).
 
 ## Try NAC3
 
@@ -21,21 +16,42 @@ NAC3 is packaged using the [Nix](https://nixos.org) Flakes system. Install Nix 2
 
 After setting up Nix as above, use ``nix shell git+https://github.com/m-labs/artiq.git?ref=nac3`` to get a shell with the NAC3 version of ARTIQ. See the ``examples`` directory in ARTIQ (``nac3`` Git branch) for some samples of NAC3 kernel code.
 
-### Windows
+### Windows (work in progress)
 
-Install [MSYS2](https://www.msys2.org/), and open "MSYS2 MinGW x64". Edit ``/etc/pacman.conf`` to add:
+NAC3 ARTIQ packaging for MSYS2/Windows is not yet complete so installation involves many manual steps. It is also less tested and you may encounter problems.
+
+Install [MSYS2](https://www.msys2.org/) and run the following commands:
 ```
-[artiq]
-SigLevel = Optional TrustAll
-Server = https://lab.m-labs.hk/msys2
+pacman -S mingw-w64-x86_64-python-h5py mingw-w64-x86_64-python-pyqt5 mingw-w64-x86_64-python-scipy mingw-w64-x86_64-python-prettytable mingw-w64-x86_64-python-pygit2
+pacman -S mingw-w64-x86_64-python-pip
+pip install qasync
+pip install pyqtgraph
+pacman -S patch git
+git clone https://github.com/m-labs/sipyco
+cd sipyco
+git show 20c946aad78872fe60b78d9b57a624d69f3eea47 | patch -p1 -R
+python setup.py install
+cd ..
+git clone -b nac3 https://github.com/m-labs/artiq
+cd artiq
+python setup.py install
 ```
 
-Then run the following commands:
+Locate a recent build of ``nac3artiq-msys2`` from [Hydra](https://nixbld.m-labs.hk) and download ``nac3artiq.zip``. Then extract the contents in the appropriate location:
 ```
-pacman -Syu
-pacman -S mingw-w64-x86_64-artiq
+pacman -S unzip
+wget https://nixbld.m-labs.hk/build/115529/download/1/nac3artiq.zip # edit the build number
+unzip nac3artiq.zip -d C:/msys64/mingw64/lib/python3.9/site-packages
+```
+
+Do the same for ``lld-msys2``:
+```
+wget https://nixbld.m-labs.hk/build/115527/download/1/ld.lld.exe
+mv ld.lld.exe C:/msys64/mingw64/bin
 ```
 
+And you should be good to go.
+
 Note: This build of NAC3 cannot be used with Anaconda Python nor the python.org binaries for Windows. Those Python versions are compiled with Visual Studio (MSVC) and their ABI is incompatible with the GNU ABI used in this build. We have no plans to support Visual Studio nor the MSVC ABI. If you need a MSVC build, please install the requisite bloated spyware from Microsoft and compile NAC3 yourself.
 
 ## For developers
@@ -45,7 +61,6 @@ This repository contains:
 - ``nac3parser``: Python parser (based on RustPython).
 - ``nac3core``: Core compiler library, containing type-checking and code generation.
 - ``nac3standalone``: Standalone compiler tool (core language only).
-- ``nac3ld``: Minimalist RISC-V and ARM linker.
 - ``nac3artiq``: Integration with ARTIQ and implementation of ARTIQ-specific extensions to the core language.
 - ``runkernel``: Simple program that runs compiled ARTIQ kernels on the host and displays RTIO operations. Useful for testing without hardware.
 
@@ -2,16 +2,16 @@
   "nodes": {
     "nixpkgs": {
       "locked": {
-        "lastModified": 1659689094,
-        "narHash": "sha256-cXrWxpPYpV1PeEhtpQf9W++8aCgwzxpx2PzfszPofJE=",
+        "lastModified": 1648553562,
+        "narHash": "sha256-xQhRKu6h0phd56oCzGjkhHkY4eDI1XKedGqkFtlXapk=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "697fc6ae98d077f6448cada3ecd63465c48c6af5",
+        "rev": "9b168e5e62406fa2e55e132f390379a6ba22b402",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "master",
+        "ref": "nixos-21.11",
         "repo": "nixpkgs",
         "type": "github"
       }
73 flake.nix
@@ -1,7 +1,7 @@
 {
   description = "The third-generation ARTIQ compiler";
 
-  inputs.nixpkgs.url = github:NixOS/nixpkgs/master;
+  inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-21.11;
 
   outputs = { self, nixpkgs }:
     let
@@ -10,18 +10,12 @@
       packages.x86_64-linux = rec {
         llvm-nac3 = pkgs.callPackage ./nix/llvm {};
         nac3artiq = pkgs.python3Packages.toPythonModule (
-          pkgs.rustPlatform.buildRustPackage rec {
+          pkgs.rustPlatform.buildRustPackage {
             name = "nac3artiq";
             outputs = [ "out" "runkernel" "standalone" ];
             src = self;
-            cargoLock = {
-              lockFile = ./Cargo.lock;
-              outputHashes = {
-                "inkwell-0.1.0" = "sha256-+ih3SO0n6YmZ/mcf+rLDwPAy/1MEZ/A+tI4pM1pUhvU=";
-              };
-            };
-            passthru.cargoLock = cargoLock;
-            nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_14.clang-unwrapped pkgs.llvmPackages_14.llvm.out llvm-nac3 ];
+            cargoLock = { lockFile = ./Cargo.lock; };
+            nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_13.clang-unwrapped llvm-nac3 ];
             buildInputs = [ pkgs.python3 llvm-nac3 ];
             checkInputs = [ (pkgs.python3.withPackages(ps: [ ps.numpy ])) ];
             checkPhase =
@@ -55,21 +49,21 @@
 
         # LLVM PGO support
         llvm-nac3-instrumented = pkgs.callPackage ./nix/llvm {
-          stdenv = pkgs.llvmPackages_14.stdenv;
+          stdenv = pkgs.llvmPackages_13.stdenv;
           extraCmakeFlags = [ "-DLLVM_BUILD_INSTRUMENTED=IR" ];
         };
         nac3artiq-instrumented = pkgs.python3Packages.toPythonModule (
           pkgs.rustPlatform.buildRustPackage {
             name = "nac3artiq-instrumented";
             src = self;
-            inherit (nac3artiq) cargoLock;
-            nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_14.clang-unwrapped pkgs.llvmPackages_14.llvm.out llvm-nac3-instrumented ];
+            cargoLock = { lockFile = ./Cargo.lock; };
+            nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_13.clang-unwrapped llvm-nac3-instrumented ];
             buildInputs = [ pkgs.python3 llvm-nac3-instrumented ];
             cargoBuildFlags = [ "--package" "nac3artiq" "--features" "init-llvm-profile" ];
             doCheck = false;
             configurePhase =
               ''
-                export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS="-C link-arg=-L${pkgs.llvmPackages_14.compiler-rt}/lib/linux -C link-arg=-lclang_rt.profile-x86_64"
+                export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS="-C link-arg=-L${pkgs.llvmPackages_13.compiler-rt}/lib/linux -C link-arg=-lclang_rt.profile-x86_64"
               '';
             installPhase =
               ''
@@ -81,35 +75,11 @@
         );
         nac3artiq-profile = pkgs.stdenvNoCC.mkDerivation {
           name = "nac3artiq-profile";
-          srcs = [
-            (pkgs.fetchFromGitHub {
-              owner = "m-labs";
-              repo = "sipyco";
-              rev = "939f84f9b5eef7efbf7423c735d1834783b6140e";
-              sha256 = "sha256-15Nun4EY35j+6SPZkjzZtyH/ncxLS60KuGJjFh5kSTc=";
-            })
-            (pkgs.fetchFromGitHub {
-              owner = "m-labs";
-              repo = "artiq";
-              rev = "dd57fdc530baf926a5f354dc1c2bd90564affd96";
-              sha256 = "sha256-hcqVcToYWkc3oDFkKr9wZUF65ydiSYVHdmiGiu2Mc1c=";
-            })
-          ];
-          buildInputs = [
-            (python3-mimalloc.withPackages(ps: [ ps.numpy ps.jsonschema nac3artiq-instrumented ]))
-            pkgs.llvmPackages_14.llvm.out
-          ];
+          src = self;
+          buildInputs = [ (python3-mimalloc.withPackages(ps: [ ps.numpy nac3artiq-instrumented ])) pkgs.lld_13 pkgs.llvmPackages_13.libllvm ];
           phases = [ "buildPhase" "installPhase" ];
-          buildPhase =
-            ''
-              srcs=($srcs)
-              sipyco=''${srcs[0]}
-              artiq=''${srcs[1]}
-              export PYTHONPATH=$sipyco:$artiq
-              python -m artiq.frontend.artiq_ddb_template $artiq/artiq/examples/nac3devices/nac3devices.json > device_db.py
-              cp $artiq/artiq/examples/nac3devices/nac3devices.py .
-              python -m artiq.frontend.artiq_compile nac3devices.py
-            '';
+          # TODO: get more representative code.
+          buildPhase = "python $src/nac3artiq/demo/demo.py";
           installPhase =
             ''
               mkdir $out
@@ -117,15 +87,15 @@
             '';
         };
         llvm-nac3-pgo = pkgs.callPackage ./nix/llvm {
-          stdenv = pkgs.llvmPackages_14.stdenv;
+          stdenv = pkgs.llvmPackages_13.stdenv;
           extraCmakeFlags = [ "-DLLVM_PROFDATA_FILE=${nac3artiq-profile}/llvm.profdata" ];
         };
         nac3artiq-pgo = pkgs.python3Packages.toPythonModule (
           pkgs.rustPlatform.buildRustPackage {
             name = "nac3artiq-pgo";
             src = self;
-            inherit (nac3artiq) cargoLock;
-            nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_14.clang-unwrapped pkgs.llvmPackages_14.llvm.out llvm-nac3-pgo ];
+            cargoLock = { lockFile = ./Cargo.lock; };
+            nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_13.clang-unwrapped llvm-nac3-pgo ];
             buildInputs = [ pkgs.python3 llvm-nac3-pgo ];
             cargoBuildFlags = [ "--package" "nac3artiq" ];
             cargoTestFlags = [ "--package" "nac3ast" "--package" "nac3parser" "--package" "nac3core" "--package" "nac3artiq" ];
@@ -146,12 +116,11 @@
         buildInputs = with pkgs; [
           # build dependencies
           packages.x86_64-linux.llvm-nac3
-          llvmPackages_14.clang-unwrapped # IRRT
-          pkgs.llvmPackages_14.llvm.out # IRRT
+          llvmPackages_13.clang-unwrapped # IRRT
          cargo
          rustc
           # runtime dependencies
-          lld_14 # for running kernels on the host
+          lld_13
           (packages.x86_64-linux.python3-mimalloc.withPackages(ps: [ ps.numpy ]))
           # development tools
           cargo-insta
@@ -170,15 +139,15 @@
       };
 
       hydraJobs = {
-        inherit (packages.x86_64-linux) llvm-nac3 nac3artiq nac3artiq-pgo;
+        inherit (packages.x86_64-linux) llvm-nac3 nac3artiq;
         llvm-nac3-msys2 = packages.x86_64-w64-mingw32.llvm-nac3;
         nac3artiq-msys2 = packages.x86_64-w64-mingw32.nac3artiq;
         nac3artiq-msys2-pkg = packages.x86_64-w64-mingw32.nac3artiq-pkg;
         lld-msys2 = packages.x86_64-w64-mingw32.lld;
       };
     };
 
   nixConfig = {
-    extra-trusted-public-keys = "nixbld.m-labs.hk-1:5aSRVA5b320xbNvu30tqxVPXpld73bhtOeH6uAjRyHc=";
-    extra-substituters = "https://nixbld.m-labs.hk";
+    binaryCachePublicKeys = ["nixbld.m-labs.hk-1:5aSRVA5b320xbNvu30tqxVPXpld73bhtOeH6uAjRyHc="];
+    binaryCaches = ["https://nixbld.m-labs.hk" "https://cache.nixos.org"];
   };
 }
56 nac3.svg
@@ -1,56 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<svg
-   id="a"
-   width="128"
-   height="128"
-   viewBox="0 0 95.99999 95.99999"
-   version="1.1"
-   sodipodi:docname="nac3.svg"
-   inkscape:version="1.1.1 (3bf5ae0d25, 2021-09-20)"
-   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
-   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
-   xmlns="http://www.w3.org/2000/svg"
-   xmlns:svg="http://www.w3.org/2000/svg">
-  <defs
-     id="defs11" />
-  <sodipodi:namedview
-     id="namedview9"
-     pagecolor="#ffffff"
-     bordercolor="#666666"
-     borderopacity="1.0"
-     inkscape:pageshadow="2"
-     inkscape:pageopacity="0.0"
-     inkscape:pagecheckerboard="0"
-     inkscape:document-units="mm"
-     showgrid="false"
-     units="px"
-     width="128px"
-     inkscape:zoom="5.9448568"
-     inkscape:cx="60.472441"
-     inkscape:cy="60.556547"
-     inkscape:window-width="2560"
-     inkscape:window-height="1371"
-     inkscape:window-x="0"
-     inkscape:window-y="32"
-     inkscape:window-maximized="1"
-     inkscape:current-layer="a" />
-  <rect
-     x="40.072601"
-     y="-26.776209"
-     width="55.668747"
-     height="55.668747"
-     transform="matrix(0.71803815,0.69600374,-0.71803815,0.69600374,0,0)"
-     style="fill:#be211e;stroke:#000000;stroke-width:4.37375px;stroke-linecap:round;stroke-linejoin:round"
-     id="rect2" />
-  <line
-     x1="38.00692"
-     y1="63.457153"
-     x2="57.993061"
-     y2="63.457153"
-     style="fill:none;stroke:#000000;stroke-width:4.37269px;stroke-linecap:round;stroke-linejoin:round"
-     id="line4" />
-  <path
-     d="m 48.007301,57.843329 c -1.943097,0 -3.877522,-0.41727 -5.686157,-1.246007 -3.218257,-1.474616 -5.650382,-4.075418 -6.849639,-7.323671 -2.065624,-5.588921 -1.192751,-10.226647 2.575258,-13.827 0.611554,-0.584909 1.518048,-0.773041 2.323689,-0.488206 0.80673,0.286405 1.369495,0.998486 1.447563,1.827234 0.237469,2.549302 2.439719,5.917376 4.28414,6.55273 0.396859,0.13506 0.820953,-0.05859 1.097084,-0.35222 0.339254,-0.360754 0.451065,-0.961893 -1.013597,-3.191372 -2.089851,-3.181137 -4.638728,-8.754903 -0.262407,-15.069853 0.494457,-0.713491 1.384673,-1.068907 2.256469,-0.909156 0.871795,0.161332 1.583757,0.806404 1.752251,1.651189 0.716448,3.591862 2.962357,6.151755 5.199306,8.023138 1.935503,1.61861 4.344688,3.867387 5.435687,7.096643 2.283183,6.758017 -1.202511,14.114988 -8.060822,16.494025 -1.467083,0.509226 -2.98513,0.762536 -4.498836,0.762536 z M 39.358865,40.002192 c -0.304711,0.696206 -0.541636,2.080524 -0.56865,2.237454 -0.330316,1.918771 0.168305,3.803963 0.846157,5.539951 0.856828,2.19436 2.437543,3.942467 4.583411,4.925713 2.143691,0.981675 4.554131,1.097816 6.789992,0.322666 4.571485,-1.586549 6.977584,-6.532238 5.363036,-11.02597 v -5.27e-4 C 55.455481,39.447968 54.023463,38.162043 52.221335,36.65432 50.876945,35.529534 49.409662,33.987726 48.417983,32.135555 48.01343,31.37996 47.79547,30.34303 47.76669,29.413263 c -0.187481,0.669514 -0.212441,2.325923 -0.150396,2.93691 0.179209,1.764456 1.333476,3.644546 2.340611,5.171243 1.311568,1.988179 2.72058,6.037272 0.459681,8.367985 -1.54192,1.58953 -4.038511,2.052034 -5.839973,1.38492 -2.398314,-0.888147 -3.942744,-2.690627 -4.941118,-4.768029 -0.121194,-0.25217 -0.532464,-1.174187 -0.276619,-2.5041 z"
-     id="path6"
-     style="stroke-width:1.09317" />
-</svg>
@@ -9,17 +9,16 @@ name = "nac3artiq"
 crate-type = ["cdylib"]
 
 [dependencies]
-pyo3 = { version = "0.16", features = ["extension-module"] }
-parking_lot = "0.12"
+pyo3 = { version = "0.14", features = ["extension-module"] }
+parking_lot = "0.11"
 tempfile = "3"
 nac3parser = { path = "../nac3parser" }
 nac3core = { path = "../nac3core" }
-nac3ld = { path = "../nac3ld" }
 
 [dependencies.inkwell]
-git = "https://github.com/TheDan64/inkwell.git"
+version = "0.1.0-beta.4"
 default-features = false
-features = ["llvm14-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
+features = ["llvm13-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
 
 [features]
 init-llvm-profile = []
@@ -1,4 +1,10 @@
 from min_artiq import *
 from numpy import int32, int64
 
+
+@extern
+def output_int(x: int32):
+    ...
+
+
 @nac3
@ -68,7 +68,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
) -> Result<Option<BasicValueEnum<'ctx>>, String> {
let result = gen_call(self, ctx, obj, fun, params)?;
if let Some(end) = self.end.clone() {
let old_end = self.gen_expr(ctx, &end)?.unwrap().to_basic_value_enum(ctx, self, end.custom.unwrap())?;
let old_end = self.gen_expr(ctx, &end)?.unwrap().to_basic_value_enum(ctx, self)?;
let now = self.timeline.emit_now_mu(ctx);
let smax = ctx.module.get_function("llvm.smax.i64").unwrap_or_else(|| {
let i64 = ctx.ctx.i64_type();

@ -88,7 +88,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
ctx.builder.build_store(end_store, max);
}
if let Some(start) = self.start.clone() {
let start_val = self.gen_expr(ctx, &start)?.unwrap().to_basic_value_enum(ctx, self, start.custom.unwrap())?;
let start_val = self.gen_expr(ctx, &start)?.unwrap().to_basic_value_enum(ctx, self)?;
self.timeline.emit_at_mu(ctx, start_val);
}
Ok(result)

@ -120,7 +120,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
let old_start = self.start.take();
let old_end = self.end.take();
let now = if let Some(old_start) = &old_start {
self.gen_expr(ctx, old_start)?.unwrap().to_basic_value_enum(ctx, self, old_start.custom.unwrap())?
self.gen_expr(ctx, old_start)?.unwrap().to_basic_value_enum(ctx, self)?
} else {
self.timeline.emit_now_mu(ctx)
};

@ -174,10 +174,8 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
};
// set duration
let end_expr = self.end.take().unwrap();
let end_val = self
.gen_expr(ctx, &end_expr)?
.unwrap()
.to_basic_value_enum(ctx, self, end_expr.custom.unwrap())?;
let end_val =
self.gen_expr(ctx, &end_expr)?.unwrap().to_basic_value_enum(ctx, self)?;

// inside a sequential block
if old_start.is_none() {

@ -188,7 +186,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
let outer_end_val = self
.gen_expr(ctx, old_end)?
.unwrap()
.to_basic_value_enum(ctx, self, old_end.custom.unwrap())?;
.to_basic_value_enum(ctx, self)?;
let smax =
ctx.module.get_function("llvm.smax.i64").unwrap_or_else(|| {
let i64 = ctx.ctx.i64_type();

@ -373,7 +371,7 @@ fn rpc_codegen_callback_fn<'ctx, 'a>(
.0
.args
.iter()
.map(|arg| mapping.remove(&arg.name).unwrap().to_basic_value_enum(ctx, generator, arg.ty))
.map(|arg| mapping.remove(&arg.name).unwrap().to_basic_value_enum(ctx, generator))
.collect::<Result<Vec<_>, _>>()?;
if let Some(obj) = obj {
if let ValueEnum::Static(obj) = obj.1 {

@ -513,13 +511,11 @@ pub fn attributes_writeback<'ctx, 'a>(
}
let ty = ty.unwrap();
match &*ctx.unifier.get_ty(ty) {
TypeEnum::TObj { fields, obj_id, .. }
if *obj_id != ctx.primitives.option.get_obj_id(&ctx.unifier) =>
{
TypeEnum::TObj { fields, .. } => {
// we only care about primitive attributes
// for non-primitive attributes, they should be in another global
let mut attributes = Vec::new();
let obj = inner_resolver.get_obj_value(py, val, ctx, generator, ty)?.unwrap();
let obj = inner_resolver.get_obj_value(py, val, ctx, generator)?.unwrap();
for (name, (field_ty, is_mutable)) in fields.iter() {
if !is_mutable {
continue

@ -544,7 +540,7 @@ pub fn attributes_writeback<'ctx, 'a>(
let pydict = PyDict::new(py);
pydict.set_item("obj", val)?;
host_attributes.append(pydict)?;
values.push((ty, inner_resolver.get_obj_value(py, val, ctx, generator, ty)?.unwrap()));
values.push((ty, inner_resolver.get_obj_value(py, val, ctx, generator)?.unwrap()));
}
},
_ => {}

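The hunks above differ mostly in whether to_basic_value_enum is given the expected NAC3 type of the value; the timeline bookkeeping itself is unchanged: after a call inside a parallel block, the block's end time is raised to smax(end, now_mu) so the block covers the call. A stand-alone plain-Rust model of that update, with illustrative names rather than the actual nac3artiq API:

// Model of the end-time update emitted via llvm.smax.i64 above: the parallel
// block's end is extended to cover the call that just finished.
fn update_parallel_end(end: &mut i64, now_mu: i64) {
    *end = (*end).max(now_mu);
}

fn main() {
    let mut end = 1_000;
    update_parallel_end(&mut end, 1_500);
    assert_eq!(end, 1_500);
    update_parallel_end(&mut end, 900);
    assert_eq!(end, 1_500); // an earlier call never shrinks the block
}
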
@ -1,13 +1,11 @@
use std::collections::{HashMap, HashSet};
use std::fs;
use std::io::Write;
use std::process::Command;
use std::rc::Rc;
use std::sync::Arc;

use inkwell::{
memory_buffer::MemoryBuffer,
module::{Linkage, Module},
passes::{PassManager, PassManagerBuilder},
targets::*,
OptimizationLevel,

@ -37,8 +35,6 @@ use nac3core::{
typecheck::{type_inferencer::PrimitiveStore, typedef::Type},
};

use nac3ld::Linker;

use tempfile::{self, TempDir};

use crate::codegen::attributes_writeback;

@ -69,7 +65,6 @@ pub struct PrimitivePythonId {
uint32: u64,
uint64: u64,
float: u64,
float64: u64,
bool: u64,
list: u64,
tuple: u64,

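PrimitivePythonId caches the CPython id() of each primitive type object so objects seen later can be classified by comparing ids instead of re-importing modules. A minimal pyo3 sketch of capturing one such id, mirroring the get_attr_id helper used further down; the module and attribute names are the ones the diff itself imports, everything else is illustrative:

use pyo3::prelude::*;

// Stand-alone sketch: fetch id(numpy.int32) the way nac3artiq's get_attr_id
// helper does, by calling builtins.id on the attribute object.
fn main() -> PyResult<()> {
    Python::with_gil(|py| {
        let builtins = PyModule::import(py, "builtins")?;
        let id_fn = builtins.getattr("id")?;
        let numpy = PyModule::import(py, "numpy")?;
        let int32_id: u64 = id_fn.call1((numpy.getattr("int32")?,))?.extract()?;
        println!("id(numpy.int32) = {}", int32_id);
        Ok(())
    })
}
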
@ -266,16 +261,210 @@ impl Nac3 {
|
|||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn compile_method<T>(
|
||||
&self,
|
||||
fn add_exceptions(
|
||||
composer: &mut TopLevelComposer,
|
||||
builtin_def: &mut HashMap<StrRef, DefinitionId>,
|
||||
builtin_ty: &mut HashMap<StrRef, Type>,
|
||||
error_names: &[&str]
|
||||
) -> Vec<Type> {
|
||||
let mut types = Vec::new();
|
||||
// note: this is only for builtin exceptions, i.e. the exception name is "0:{exn}"
|
||||
for name in error_names {
|
||||
let def_id = composer.definition_ast_list.len();
|
||||
let (exception_fn, exception_class, exception_cons, exception_type) = get_exn_constructor(
|
||||
name,
|
||||
// class id
|
||||
def_id,
|
||||
// constructor id
|
||||
def_id + 1,
|
||||
&mut composer.unifier,
|
||||
&composer.primitives_ty
|
||||
);
|
||||
composer.definition_ast_list.push((Arc::new(RwLock::new(exception_class)), None));
|
||||
composer.definition_ast_list.push((Arc::new(RwLock::new(exception_fn)), None));
|
||||
builtin_ty.insert((*name).into(), exception_cons);
|
||||
builtin_def.insert((*name).into(), DefinitionId(def_id));
|
||||
types.push(exception_type);
|
||||
}
|
||||
types
|
||||
}
|
||||
|
||||
#[pymethods]
|
||||
impl Nac3 {
|
||||
#[new]
|
||||
fn new(isa: &str, py: Python) -> PyResult<Self> {
|
||||
let isa = match isa {
|
||||
"host" => Isa::Host,
|
||||
"rv32g" => Isa::RiscV32G,
|
||||
"rv32ima" => Isa::RiscV32IMA,
|
||||
"cortexa9" => Isa::CortexA9,
|
||||
_ => return Err(exceptions::PyValueError::new_err("invalid ISA")),
|
||||
};
|
||||
let time_fns: &(dyn TimeFns + Sync) = match isa {
|
||||
Isa::Host => &timeline::EXTERN_TIME_FNS,
|
||||
Isa::RiscV32G => &timeline::NOW_PINNING_TIME_FNS_64,
|
||||
Isa::RiscV32IMA => &timeline::NOW_PINNING_TIME_FNS,
|
||||
Isa::CortexA9 => &timeline::EXTERN_TIME_FNS,
|
||||
};
|
||||
let primitive: PrimitiveStore = TopLevelComposer::make_primitives().0;
|
||||
let builtins = vec![
|
||||
(
|
||||
"now_mu".into(),
|
||||
FunSignature { args: vec![], ret: primitive.int64, vars: HashMap::new() },
|
||||
Arc::new(GenCall::new(Box::new(move |ctx, _, _, _, _| {
|
||||
Ok(Some(time_fns.emit_now_mu(ctx)))
|
||||
}))),
|
||||
),
|
||||
(
|
||||
"at_mu".into(),
|
||||
FunSignature {
|
||||
args: vec![FuncArg {
|
||||
name: "t".into(),
|
||||
ty: primitive.int64,
|
||||
default_value: None,
|
||||
}],
|
||||
ret: primitive.none,
|
||||
vars: HashMap::new(),
|
||||
},
|
||||
Arc::new(GenCall::new(Box::new(move |ctx, _, _, args, generator| {
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator).unwrap();
|
||||
time_fns.emit_at_mu(ctx, arg);
|
||||
Ok(None)
|
||||
}))),
|
||||
),
|
||||
(
|
||||
"delay_mu".into(),
|
||||
FunSignature {
|
||||
args: vec![FuncArg {
|
||||
name: "dt".into(),
|
||||
ty: primitive.int64,
|
||||
default_value: None,
|
||||
}],
|
||||
ret: primitive.none,
|
||||
vars: HashMap::new(),
|
||||
},
|
||||
Arc::new(GenCall::new(Box::new(move |ctx, _, _, args, generator| {
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator).unwrap();
|
||||
time_fns.emit_delay_mu(ctx, arg);
|
||||
Ok(None)
|
||||
}))),
|
||||
),
|
||||
];
|
||||
|
||||
let builtins_mod = PyModule::import(py, "builtins").unwrap();
|
||||
let id_fn = builtins_mod.getattr("id").unwrap();
|
||||
let numpy_mod = PyModule::import(py, "numpy").unwrap();
|
||||
let typing_mod = PyModule::import(py, "typing").unwrap();
|
||||
let types_mod = PyModule::import(py, "types").unwrap();
|
||||
|
||||
let get_id = |x| id_fn.call1((x,)).unwrap().extract().unwrap();
|
||||
let get_attr_id = |obj: &PyModule, attr| id_fn.call1((obj.getattr(attr).unwrap(),))
|
||||
.unwrap().extract().unwrap();
|
||||
let primitive_ids = PrimitivePythonId {
|
||||
virtual_id: get_id(
|
||||
builtins_mod
|
||||
.getattr("globals")
|
||||
.unwrap()
|
||||
.call0()
|
||||
.unwrap()
|
||||
.get_item("virtual")
|
||||
.unwrap(
|
||||
)),
|
||||
generic_alias: (
|
||||
get_attr_id(typing_mod, "_GenericAlias"),
|
||||
get_attr_id(types_mod, "GenericAlias"),
|
||||
),
|
||||
none: id_fn
|
||||
.call1((builtins_mod
|
||||
.getattr("globals")
|
||||
.unwrap()
|
||||
.call0()
|
||||
.unwrap()
|
||||
.get_item("none")
|
||||
.unwrap(),))
|
||||
.unwrap()
|
||||
.extract()
|
||||
.unwrap(),
|
||||
typevar: get_attr_id(typing_mod, "TypeVar"),
|
||||
int: get_attr_id(builtins_mod, "int"),
|
||||
int32: get_attr_id(numpy_mod, "int32"),
|
||||
int64: get_attr_id(numpy_mod, "int64"),
|
||||
uint32: get_attr_id(numpy_mod, "uint32"),
|
||||
uint64: get_attr_id(numpy_mod, "uint64"),
|
||||
bool: get_attr_id(builtins_mod, "bool"),
|
||||
float: get_attr_id(builtins_mod, "float"),
|
||||
list: get_attr_id(builtins_mod, "list"),
|
||||
tuple: get_attr_id(builtins_mod, "tuple"),
|
||||
exception: get_attr_id(builtins_mod, "Exception"),
|
||||
option: id_fn
|
||||
.call1((builtins_mod
|
||||
.getattr("globals")
|
||||
.unwrap()
|
||||
.call0()
|
||||
.unwrap()
|
||||
.get_item("Option")
|
||||
.unwrap(),))
|
||||
.unwrap()
|
||||
.extract()
|
||||
.unwrap(),
|
||||
};
|
||||
|
||||
let working_directory = tempfile::Builder::new().prefix("nac3-").tempdir().unwrap();
|
||||
fs::write(working_directory.path().join("kernel.ld"), include_bytes!("kernel.ld")).unwrap();
|
||||
|
||||
Ok(Nac3 {
|
||||
isa,
|
||||
time_fns,
|
||||
primitive,
|
||||
builtins,
|
||||
primitive_ids,
|
||||
top_levels: Default::default(),
|
||||
pyid_to_def: Default::default(),
|
||||
working_directory,
|
||||
string_store: Default::default(),
|
||||
exception_ids: Default::default(),
|
||||
deferred_eval_store: DeferredEvaluationStore::new(),
|
||||
})
|
||||
}
|
||||
|
||||
fn analyze(&mut self, functions: &PySet, classes: &PySet) -> PyResult<()> {
|
||||
let (modules, class_ids) =
|
||||
Python::with_gil(|py| -> PyResult<(HashMap<u64, PyObject>, HashSet<u64>)> {
|
||||
let mut modules: HashMap<u64, PyObject> = HashMap::new();
|
||||
let mut class_ids: HashSet<u64> = HashSet::new();
|
||||
|
||||
let id_fn = PyModule::import(py, "builtins")?.getattr("id")?;
|
||||
let getmodule_fn = PyModule::import(py, "inspect")?.getattr("getmodule")?;
|
||||
|
||||
for function in functions.iter() {
|
||||
let module = getmodule_fn.call1((function,))?.extract()?;
|
||||
modules.insert(id_fn.call1((&module,))?.extract()?, module);
|
||||
}
|
||||
for class in classes.iter() {
|
||||
let module = getmodule_fn.call1((class,))?.extract()?;
|
||||
modules.insert(id_fn.call1((&module,))?.extract()?, module);
|
||||
class_ids.insert(id_fn.call1((class,))?.extract()?);
|
||||
}
|
||||
Ok((modules, class_ids))
|
||||
})?;
|
||||
|
||||
for module in modules.into_values() {
|
||||
self.register_module(module, &class_ids)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn compile_method_to_file(
|
||||
&mut self,
|
||||
obj: &PyAny,
|
||||
method_name: &str,
|
||||
args: Vec<&PyAny>,
|
||||
filename: &str,
|
||||
embedding_map: &PyAny,
|
||||
py: Python,
|
||||
link_fn: &dyn Fn(&Module) -> PyResult<T>,
|
||||
) -> PyResult<T> {
|
||||
) -> PyResult<()> {
|
||||
let (mut composer, mut builtins_def, mut builtins_ty) = TopLevelComposer::new(
|
||||
self.builtins.clone(),
|
||||
ComposerConfig { kernel_ann: Some("Kernel"), kernel_invariant_ann: "KernelInvariant" },
|
||||
|
@ -371,7 +560,7 @@ impl Nac3 {
|
|||
});
|
||||
|
||||
let (name, def_id, ty) = composer
|
||||
.register_top_level(stmt.clone(), Some(resolver.clone()), path.clone(), false)
|
||||
.register_top_level(stmt.clone(), Some(resolver.clone()), path.clone())
|
||||
.map_err(|e| {
|
||||
CompileError::new_err(format!(
|
||||
"compilation failed\n----------\n{}",
|
||||
|
@ -390,17 +579,10 @@ impl Nac3 {
|
|||
}
|
||||
}
|
||||
StmtKind::ClassDef { name, body, .. } => {
|
||||
let class_name = name.to_string();
|
||||
let class_obj = module.getattr(py, &class_name).unwrap();
|
||||
let class_obj = module.getattr(py, name.to_string()).unwrap();
|
||||
for stmt in body.iter() {
|
||||
if let StmtKind::FunctionDef { name, decorator_list, .. } = &stmt.node {
|
||||
if decorator_list.iter().any(|decorator| matches!(decorator.node, ExprKind::Name { id, .. } if id == "rpc".into())) {
|
||||
if name == &"__init__".into() {
|
||||
return Err(CompileError::new_err(format!(
|
||||
"compilation failed\n----------\nThe constructor of class {} should not be decorated with rpc decorator (at {})",
|
||||
class_name, stmt.location
|
||||
)));
|
||||
}
|
||||
rpc_ids.push((Some((class_obj.clone(), *name)), def_id));
|
||||
}
|
||||
}
|
||||
|
@ -437,7 +619,7 @@ impl Nac3 {
|
|||
format!("def __modinit__():\n base.{}({})", method_name, arg_names.join(", "))
|
||||
};
|
||||
let mut synthesized =
|
||||
parse_program(&synthesized, "<nac3_synthesized_modinit>".to_string().into()).unwrap();
|
||||
parse_program(&synthesized, "__nac3_synthesized_modinit__".to_string().into()).unwrap();
|
||||
let inner_resolver = Arc::new(InnerResolver {
|
||||
id_to_type: builtins_ty.clone().into(),
|
||||
id_to_def: builtins_def.clone().into(),
|
||||
|
@ -458,7 +640,7 @@ impl Nac3 {
|
|||
});
|
||||
let resolver = Arc::new(Resolver(inner_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;
|
||||
let (_, def_id, _) = composer
|
||||
.register_top_level(synthesized.pop().unwrap(), Some(resolver.clone()), "".into(), false)
|
||||
.register_top_level(synthesized.pop().unwrap(), Some(resolver.clone()), "".into())
|
||||
.unwrap();
|
||||
|
||||
let fun_signature =
|
||||
|
@ -471,7 +653,7 @@ impl Nac3 {
|
|||
|
||||
if let Err(e) = composer.start_analysis(true) {
|
||||
// report error of __modinit__ separately
|
||||
if !e.contains("<nac3_synthesized_modinit>") {
|
||||
if !e.contains("__nac3_synthesized_modinit__") {
|
||||
return Err(CompileError::new_err(format!(
|
||||
"compilation failed\n----------\n{}",
|
||||
e
|
||||
|
@ -485,10 +667,7 @@ impl Nac3 {
|
|||
&mut composer.unifier,
|
||||
&self.primitive,
|
||||
);
|
||||
return Err(CompileError::new_err(format!(
|
||||
"compilation failed\n----------\n{}",
|
||||
msg.unwrap_or(e)
|
||||
)));
|
||||
return Err(CompileError::new_err(msg.unwrap_or(e)));
|
||||
}
|
||||
}
|
||||
let top_level = Arc::new(composer.make_top_level_context());
|
||||
|
@ -569,6 +748,8 @@ impl Nac3 {
|
|||
calls: Arc::new(Default::default()),
|
||||
id: 0,
|
||||
};
|
||||
let isa = self.isa;
|
||||
let working_directory = self.working_directory.path().to_owned();
|
||||
|
||||
let membuffers: Arc<Mutex<Vec<Vec<u8>>>> = Default::default();
|
||||
|
||||
|
@ -634,19 +815,6 @@ impl Nac3 {
|
|||
function_iter = func.get_next_function();
|
||||
}
|
||||
|
||||
// Demote all global variables that will not be referenced in the kernel to private
|
||||
let preserved_symbols: Vec<&'static [u8]> = vec![
|
||||
b"typeinfo",
|
||||
b"now",
|
||||
];
|
||||
let mut global_option = main.get_first_global();
|
||||
while let Some(global) = global_option {
|
||||
if !preserved_symbols.contains(&(global.get_name().to_bytes())) {
|
||||
global.set_linkage(Linkage::Private);
|
||||
}
|
||||
global_option = global.get_next_global();
|
||||
}
|
||||
|
||||
let builder = PassManagerBuilder::create();
|
||||
builder.set_optimization_level(OptimizationLevel::Aggressive);
|
||||
let passes = PassManager::create(());
|
||||
|
@ -654,13 +822,7 @@ impl Nac3 {
|
|||
builder.populate_module_pass_manager(&passes);
|
||||
passes.run_on(&main);
|
||||
|
||||
link_fn(&main)
|
||||
}
|
||||
|
||||
fn get_llvm_target_machine(
|
||||
&self,
|
||||
) -> TargetMachine {
|
||||
let (triple, features) = match self.isa {
|
||||
let (triple, features) = match isa {
|
||||
Isa::Host => (
|
||||
TargetMachine::get_default_triple(),
|
||||
TargetMachine::get_host_cpu_features().to_string(),
|
||||
|
@ -671,12 +833,12 @@ impl Nac3 {
|
|||
Isa::RiscV32IMA => (TargetTriple::create("riscv32-unknown-linux"), "+a,+m".to_string()),
|
||||
Isa::CortexA9 => (
|
||||
TargetTriple::create("armv7-unknown-linux-gnueabihf"),
|
||||
"+dsp,+fp16,+neon,+vfp3,+long-calls".to_string(),
|
||||
"+dsp,+fp16,+neon,+vfp3".to_string(),
|
||||
),
|
||||
};
|
||||
let target =
|
||||
Target::from_triple(&triple).expect("couldn't create target from target triple");
|
||||
target
|
||||
let target_machine = target
|
||||
.create_target_machine(
|
||||
&triple,
|
||||
"",
|
||||
|
@ -685,280 +847,41 @@ impl Nac3 {
|
|||
RelocMode::PIC,
|
||||
CodeModel::Default,
|
||||
)
|
||||
.expect("couldn't create target machine")
|
||||
}
|
||||
}
|
||||
.expect("couldn't create target machine");
|
||||
target_machine
|
||||
.write_to_file(&main, FileType::Object, &working_directory.join("module.o"))
|
||||
.expect("couldn't write module to file");
|
||||
|
||||
fn link_with_lld(
|
||||
elf_filename: String,
|
||||
obj_filename: String,
|
||||
) -> PyResult<()>{
|
||||
let linker_args = vec![
|
||||
"-shared".to_string(),
|
||||
"--eh-frame-hdr".to_string(),
|
||||
"-x".to_string(),
|
||||
"-o".to_string(),
|
||||
elf_filename,
|
||||
obj_filename,
|
||||
];
|
||||
|
||||
#[cfg(not(windows))]
|
||||
let lld_command = "ld.lld";
|
||||
#[cfg(windows)]
|
||||
let lld_command = "ld.lld.exe";
|
||||
if let Ok(linker_status) = Command::new(lld_command).args(linker_args).status() {
|
||||
if !linker_status.success() {
|
||||
return Err(CompileError::new_err("failed to start linker"));
|
||||
}
|
||||
} else {
|
||||
return Err(CompileError::new_err(
|
||||
"linker returned non-zero status code",
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn add_exceptions(
|
||||
composer: &mut TopLevelComposer,
|
||||
builtin_def: &mut HashMap<StrRef, DefinitionId>,
|
||||
builtin_ty: &mut HashMap<StrRef, Type>,
|
||||
error_names: &[&str]
|
||||
) -> Vec<Type> {
|
||||
let mut types = Vec::new();
|
||||
// note: this is only for builtin exceptions, i.e. the exception name is "0:{exn}"
|
||||
for name in error_names {
|
||||
let def_id = composer.definition_ast_list.len();
|
||||
let (exception_fn, exception_class, exception_cons, exception_type) = get_exn_constructor(
|
||||
name,
|
||||
// class id
|
||||
def_id,
|
||||
// constructor id
|
||||
def_id + 1,
|
||||
&mut composer.unifier,
|
||||
&composer.primitives_ty
|
||||
);
|
||||
composer.definition_ast_list.push((Arc::new(RwLock::new(exception_class)), None));
|
||||
composer.definition_ast_list.push((Arc::new(RwLock::new(exception_fn)), None));
|
||||
builtin_ty.insert((*name).into(), exception_cons);
|
||||
builtin_def.insert((*name).into(), DefinitionId(def_id));
|
||||
types.push(exception_type);
|
||||
}
|
||||
types
|
||||
}
|
||||
|
||||
#[pymethods]
|
||||
impl Nac3 {
|
||||
#[new]
|
||||
fn new(isa: &str, py: Python) -> PyResult<Self> {
|
||||
let isa = match isa {
|
||||
"host" => Isa::Host,
|
||||
"rv32g" => Isa::RiscV32G,
|
||||
"rv32ima" => Isa::RiscV32IMA,
|
||||
"cortexa9" => Isa::CortexA9,
|
||||
_ => return Err(exceptions::PyValueError::new_err("invalid ISA")),
|
||||
};
|
||||
let time_fns: &(dyn TimeFns + Sync) = match isa {
|
||||
Isa::Host => &timeline::EXTERN_TIME_FNS,
|
||||
Isa::RiscV32G => &timeline::NOW_PINNING_TIME_FNS_64,
|
||||
Isa::RiscV32IMA => &timeline::NOW_PINNING_TIME_FNS,
|
||||
Isa::CortexA9 => &timeline::EXTERN_TIME_FNS,
|
||||
};
|
||||
let primitive: PrimitiveStore = TopLevelComposer::make_primitives().0;
|
||||
let builtins = vec![
|
||||
(
|
||||
"now_mu".into(),
|
||||
FunSignature { args: vec![], ret: primitive.int64, vars: HashMap::new() },
|
||||
Arc::new(GenCall::new(Box::new(move |ctx, _, _, _, _| {
|
||||
Ok(Some(time_fns.emit_now_mu(ctx)))
|
||||
}))),
|
||||
),
|
||||
(
|
||||
"at_mu".into(),
|
||||
FunSignature {
|
||||
args: vec![FuncArg {
|
||||
name: "t".into(),
|
||||
ty: primitive.int64,
|
||||
default_value: None,
|
||||
}],
|
||||
ret: primitive.none,
|
||||
vars: HashMap::new(),
|
||||
},
|
||||
Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| {
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap();
|
||||
time_fns.emit_at_mu(ctx, arg);
|
||||
Ok(None)
|
||||
}))),
|
||||
),
|
||||
(
|
||||
"delay_mu".into(),
|
||||
FunSignature {
|
||||
args: vec![FuncArg {
|
||||
name: "dt".into(),
|
||||
ty: primitive.int64,
|
||||
default_value: None,
|
||||
}],
|
||||
ret: primitive.none,
|
||||
vars: HashMap::new(),
|
||||
},
|
||||
Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| {
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap();
|
||||
time_fns.emit_delay_mu(ctx, arg);
|
||||
Ok(None)
|
||||
}))),
|
||||
),
|
||||
let mut linker_args = vec![
|
||||
"-shared".to_string(),
|
||||
"--eh-frame-hdr".to_string(),
|
||||
"-x".to_string(),
|
||||
"-o".to_string(),
|
||||
filename.to_string(),
|
||||
working_directory.join("module.o").to_string_lossy().to_string(),
|
||||
];
|
||||
|
||||
let builtins_mod = PyModule::import(py, "builtins").unwrap();
|
||||
let id_fn = builtins_mod.getattr("id").unwrap();
|
||||
let numpy_mod = PyModule::import(py, "numpy").unwrap();
|
||||
let typing_mod = PyModule::import(py, "typing").unwrap();
|
||||
let types_mod = PyModule::import(py, "types").unwrap();
|
||||
|
||||
let get_id = |x| id_fn.call1((x,)).unwrap().extract().unwrap();
|
||||
let get_attr_id = |obj: &PyModule, attr| id_fn.call1((obj.getattr(attr).unwrap(),))
|
||||
.unwrap().extract().unwrap();
|
||||
let primitive_ids = PrimitivePythonId {
|
||||
virtual_id: get_id(
|
||||
builtins_mod
|
||||
.getattr("globals")
|
||||
.unwrap()
|
||||
.call0()
|
||||
.unwrap()
|
||||
.get_item("virtual")
|
||||
.unwrap(
|
||||
)),
|
||||
generic_alias: (
|
||||
get_attr_id(typing_mod, "_GenericAlias"),
|
||||
get_attr_id(types_mod, "GenericAlias"),
|
||||
),
|
||||
none: id_fn
|
||||
.call1((builtins_mod
|
||||
.getattr("globals")
|
||||
.unwrap()
|
||||
.call0()
|
||||
.unwrap()
|
||||
.get_item("none")
|
||||
.unwrap(),))
|
||||
.unwrap()
|
||||
.extract()
|
||||
.unwrap(),
|
||||
typevar: get_attr_id(typing_mod, "TypeVar"),
|
||||
int: get_attr_id(builtins_mod, "int"),
|
||||
int32: get_attr_id(numpy_mod, "int32"),
|
||||
int64: get_attr_id(numpy_mod, "int64"),
|
||||
uint32: get_attr_id(numpy_mod, "uint32"),
|
||||
uint64: get_attr_id(numpy_mod, "uint64"),
|
||||
bool: get_attr_id(builtins_mod, "bool"),
|
||||
float: get_attr_id(builtins_mod, "float"),
|
||||
float64: get_attr_id(numpy_mod, "float64"),
|
||||
list: get_attr_id(builtins_mod, "list"),
|
||||
tuple: get_attr_id(builtins_mod, "tuple"),
|
||||
exception: get_attr_id(builtins_mod, "Exception"),
|
||||
option: id_fn
|
||||
.call1((builtins_mod
|
||||
.getattr("globals")
|
||||
.unwrap()
|
||||
.call0()
|
||||
.unwrap()
|
||||
.get_item("Option")
|
||||
.unwrap(),))
|
||||
.unwrap()
|
||||
.extract()
|
||||
.unwrap(),
|
||||
};
|
||||
|
||||
let working_directory = tempfile::Builder::new().prefix("nac3-").tempdir().unwrap();
|
||||
fs::write(working_directory.path().join("kernel.ld"), include_bytes!("kernel.ld")).unwrap();
|
||||
|
||||
Ok(Nac3 {
|
||||
isa,
|
||||
time_fns,
|
||||
primitive,
|
||||
builtins,
|
||||
primitive_ids,
|
||||
top_levels: Default::default(),
|
||||
pyid_to_def: Default::default(),
|
||||
working_directory,
|
||||
string_store: Default::default(),
|
||||
exception_ids: Default::default(),
|
||||
deferred_eval_store: DeferredEvaluationStore::new(),
|
||||
})
|
||||
}
|
||||
|
||||
fn analyze(&mut self, functions: &PySet, classes: &PySet) -> PyResult<()> {
|
||||
let (modules, class_ids) =
|
||||
Python::with_gil(|py| -> PyResult<(HashMap<u64, PyObject>, HashSet<u64>)> {
|
||||
let mut modules: HashMap<u64, PyObject> = HashMap::new();
|
||||
let mut class_ids: HashSet<u64> = HashSet::new();
|
||||
|
||||
let id_fn = PyModule::import(py, "builtins")?.getattr("id")?;
|
||||
let getmodule_fn = PyModule::import(py, "inspect")?.getattr("getmodule")?;
|
||||
|
||||
for function in functions.iter() {
|
||||
let module = getmodule_fn.call1((function,))?.extract()?;
|
||||
modules.insert(id_fn.call1((&module,))?.extract()?, module);
|
||||
}
|
||||
for class in classes.iter() {
|
||||
let module = getmodule_fn.call1((class,))?.extract()?;
|
||||
modules.insert(id_fn.call1((&module,))?.extract()?, module);
|
||||
class_ids.insert(id_fn.call1((class,))?.extract()?);
|
||||
}
|
||||
Ok((modules, class_ids))
|
||||
})?;
|
||||
|
||||
for module in modules.into_values() {
|
||||
self.register_module(module, &class_ids)?;
|
||||
if isa != Isa::Host {
|
||||
linker_args.push(
|
||||
"-T".to_string()
|
||||
+ self.working_directory.path().join("kernel.ld").to_str().unwrap(),
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn compile_method_to_file(
|
||||
&mut self,
|
||||
obj: &PyAny,
|
||||
method_name: &str,
|
||||
args: Vec<&PyAny>,
|
||||
filename: &str,
|
||||
embedding_map: &PyAny,
|
||||
py: Python,
|
||||
) -> PyResult<()> {
|
||||
let target_machine = self.get_llvm_target_machine();
|
||||
|
||||
if self.isa == Isa::Host {
|
||||
let link_fn = |module: &Module| {
|
||||
let working_directory = self.working_directory.path().to_owned();
|
||||
target_machine
|
||||
.write_to_file(module, FileType::Object, &working_directory.join("module.o"))
|
||||
.expect("couldn't write module to file");
|
||||
link_with_lld(
|
||||
filename.to_string(),
|
||||
working_directory.join("module.o").to_string_lossy().to_string()
|
||||
)?;
|
||||
Ok(())
|
||||
};
|
||||
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
#[cfg(not(windows))]
|
||||
let lld_command = "ld.lld";
|
||||
#[cfg(windows)]
|
||||
let lld_command = "ld.lld.exe";
|
||||
if let Ok(linker_status) = Command::new(lld_command).args(linker_args).status() {
|
||||
if !linker_status.success() {
|
||||
return Err(CompileError::new_err("failed to start linker"));
|
||||
}
|
||||
} else {
|
||||
let link_fn = |module: &Module| {
|
||||
let object_mem = target_machine
|
||||
.write_to_memory_buffer(module, FileType::Object)
|
||||
.expect("couldn't write module to object file buffer");
|
||||
if let Ok(dyn_lib) = Linker::ld(object_mem.as_slice()) {
|
||||
if let Ok(mut file) = fs::File::create(filename) {
|
||||
file.write_all(&dyn_lib).expect("couldn't write linked library to file");
|
||||
Ok(())
|
||||
} else {
|
||||
Err(CompileError::new_err("failed to create file"))
|
||||
}
|
||||
} else {
|
||||
Err(CompileError::new_err("linker failed to process object file"))
|
||||
}
|
||||
};
|
||||
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
return Err(CompileError::new_err(
|
||||
"linker returned non-zero status code",
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn compile_method_to_mem(
|
||||
|
@ -969,40 +892,10 @@ impl Nac3 {
|
|||
embedding_map: &PyAny,
|
||||
py: Python,
|
||||
) -> PyResult<PyObject> {
|
||||
let target_machine = self.get_llvm_target_machine();
|
||||
|
||||
if self.isa == Isa::Host {
|
||||
let link_fn = |module: &Module| {
|
||||
let working_directory = self.working_directory.path().to_owned();
|
||||
target_machine
|
||||
.write_to_file(&module, FileType::Object, &working_directory.join("module.o"))
|
||||
.expect("couldn't write module to file");
|
||||
|
||||
let filename_path = self.working_directory.path().join("module.elf");
|
||||
let filename = filename_path.to_str().unwrap();
|
||||
link_with_lld(
|
||||
filename.to_string(),
|
||||
working_directory.join("module.o").to_string_lossy().to_string()
|
||||
)?;
|
||||
|
||||
Ok(PyBytes::new(py, &fs::read(filename).unwrap()).into())
|
||||
};
|
||||
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
} else {
|
||||
let link_fn = |module: &Module| {
|
||||
let object_mem = target_machine
|
||||
.write_to_memory_buffer(&module, FileType::Object)
|
||||
.expect("couldn't write module to object file buffer");
|
||||
if let Ok(dyn_lib) = Linker::ld(object_mem.as_slice()) {
|
||||
Ok(PyBytes::new(py, &dyn_lib).into())
|
||||
} else {
|
||||
Err(CompileError::new_err("linker failed to process object file"))
|
||||
}
|
||||
};
|
||||
|
||||
self.compile_method(obj, method_name, args, embedding_map, py, &link_fn)
|
||||
}
|
||||
let filename_path = self.working_directory.path().join("module.elf");
|
||||
let filename = filename_path.to_str().unwrap();
|
||||
self.compile_method_to_file(obj, method_name, args, filename, embedding_map, py)?;
|
||||
Ok(PyBytes::new(py, &fs::read(filename).unwrap()).into())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -99,7 +99,7 @@ impl StaticValue for PythonValue {
_: &mut dyn CodeGenerator,
) -> BasicValueEnum<'ctx> {
ctx.module
.get_global(format!("{}_const", self.id).as_str())
.get_global(self.id.to_string().as_str())
.map(|val| val.as_pointer_value().into())
.unwrap_or_else(|| {
Python::with_gil(|py| -> PyResult<BasicValueEnum<'ctx>> {

@ -115,7 +115,13 @@ impl StaticValue for PythonValue {
&[ctx.ctx.i32_type().const_int(id as u64, false).into()],
false,
));
Ok(global.as_pointer_value().into())
let global2 = ctx.module.add_global(
struct_type.ptr_type(AddressSpace::Generic),
None,
format!("{}_const2", self.id).as_str(),
);
global2.set_initializer(&global.as_pointer_value());
Ok(global2.as_pointer_value().into())
})
.unwrap()
})

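The hunk above stops returning the object's global directly and instead returns a second global holding a pointer to it, so the looked-up value is always pointer-typed regardless of the object's struct layout. A stand-alone inkwell sketch of that pointer-in-a-global indirection; the global names and the i32 payload are illustrative, not the nac3artiq naming scheme:

use inkwell::context::Context;
use inkwell::AddressSpace;

fn main() {
    let ctx = Context::create();
    let module = ctx.create_module("sketch");
    let i32_t = ctx.i32_type();

    // the underlying constant object (stands in for the converted host object)
    let obj = module.add_global(i32_t, None, "obj_const");
    obj.set_initializer(&i32_t.const_int(7, false));

    // a second global whose initializer is the address of the first, like the
    // "{id}_const2" global added in the hunk above
    let obj_ptr = module.add_global(i32_t.ptr_type(AddressSpace::Generic), None, "obj_const2");
    obj_ptr.set_initializer(&obj.as_pointer_value());
}
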
@ -125,7 +131,6 @@ impl StaticValue for PythonValue {
|
|||
&self,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
generator: &mut dyn CodeGenerator,
|
||||
expected_ty: Type,
|
||||
) -> Result<BasicValueEnum<'ctx>, String> {
|
||||
if let Some(val) = self.resolver.id_to_primitive.read().get(&self.id) {
|
||||
return Ok(match val {
|
||||
|
@ -145,7 +150,7 @@ impl StaticValue for PythonValue {
|
|||
|
||||
Python::with_gil(|py| -> PyResult<BasicValueEnum<'ctx>> {
|
||||
self.resolver
|
||||
.get_obj_value(py, self.value.as_ref(py), ctx, generator, expected_ty)
|
||||
.get_obj_value(py, self.value.as_ref(py), ctx, generator)
|
||||
.map(Option::unwrap)
|
||||
}).map_err(|e| e.to_string())
|
||||
}
|
||||
|
@ -164,16 +169,6 @@ impl StaticValue for PythonValue {
|
|||
let helper = &self.resolver.helper;
|
||||
let ty = helper.type_fn.call1(py, (&self.value,))?;
|
||||
let ty_id: u64 = helper.id_fn.call1(py, (ty,))?.extract(py)?;
|
||||
// for optimizing unwrap KernelInvariant
|
||||
if ty_id == self.resolver.primitive_ids.option && name == "_nac3_option".into() {
|
||||
let obj = self.value.getattr(py, &name.to_string())?;
|
||||
let id = self.resolver.helper.id_fn.call1(py, (&obj,))?.extract(py)?;
|
||||
if self.id == self.resolver.primitive_ids.none {
|
||||
return Ok(None)
|
||||
} else {
|
||||
return Ok(Some((id, obj)))
|
||||
}
|
||||
}
|
||||
let def_id = { *self.resolver.pyid_to_def.read().get(&ty_id).unwrap() };
|
||||
let mut mutable = true;
|
||||
let defs = ctx.top_level.definitions.read();
|
||||
|
@ -206,28 +201,6 @@ impl StaticValue for PythonValue {
|
|||
}))
|
||||
})
|
||||
}
|
||||
|
||||
fn get_tuple_element<'ctx>(&self, index: u32) -> Option<ValueEnum<'ctx>> {
|
||||
Python::with_gil(|py| -> PyResult<Option<(u64, PyObject)>> {
|
||||
let helper = &self.resolver.helper;
|
||||
let ty = helper.type_fn.call1(py, (&self.value,))?;
|
||||
let ty_id: u64 = helper.id_fn.call1(py, (ty,))?.extract(py)?;
|
||||
assert_eq!(ty_id, self.resolver.primitive_ids.tuple);
|
||||
let tup: &PyTuple = self.value.extract(py)?;
|
||||
let elem = tup.get_item(index as usize)?;
|
||||
let id = self.resolver.helper.id_fn.call1(py, (elem,))?.extract(py)?;
|
||||
Ok(Some((id, elem.into())))
|
||||
})
|
||||
.unwrap()
|
||||
.map(|(id, obj)| {
|
||||
ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: obj,
|
||||
store_obj: self.store_obj.clone(),
|
||||
resolver: self.resolver.clone(),
|
||||
}))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl InnerResolver {
|
||||
|
@ -296,8 +269,6 @@ impl InnerResolver {
|
|||
Ok(Ok((primitives.bool, true)))
|
||||
} else if ty_id == self.primitive_ids.float {
|
||||
Ok(Ok((primitives.float, true)))
|
||||
} else if ty_id == self.primitive_ids.float64 {
|
||||
Ok(Ok((primitives.float, true)))
|
||||
} else if ty_id == self.primitive_ids.exception {
|
||||
Ok(Ok((primitives.exception, true)))
|
||||
} else if ty_id == self.primitive_ids.list {
|
||||
|
@ -351,7 +322,7 @@ impl InnerResolver {
|
|||
let constraints = pyty.getattr("__constraints__").unwrap();
|
||||
let mut result: Vec<Type> = vec![];
|
||||
let needs_defer = self.deferred_eval_store.needs_defer.load(Relaxed);
|
||||
for i in 0usize.. {
|
||||
for i in 0.. {
|
||||
if let Ok(constr) = constraints.get_item(i) {
|
||||
if needs_defer {
|
||||
result.push(unifier.get_dummy_var().0);
|
||||
|
@ -409,7 +380,7 @@ impl InnerResolver {
|
|||
if args.len() == 1 {
|
||||
let ty = match self.get_pyty_obj_type(
|
||||
py,
|
||||
args.get_item(0)?,
|
||||
args.get_item(0),
|
||||
unifier,
|
||||
defs,
|
||||
primitives,
|
||||
|
@ -490,7 +461,7 @@ impl InnerResolver {
|
|||
if args.len() == 1 {
|
||||
let ty = match self.get_pyty_obj_type(
|
||||
py,
|
||||
args.get_item(0)?,
|
||||
args.get_item(0),
|
||||
unifier,
|
||||
defs,
|
||||
primitives,
|
||||
|
@ -545,31 +516,6 @@ impl InnerResolver {
|
|||
if let Some(ty) = self.pyid_to_type.read().get(&py_obj_id) {
|
||||
return Ok(Ok(*ty))
|
||||
}
|
||||
|
||||
// check if constructor function exists in the methods list
|
||||
let pyid_to_def = self.pyid_to_def.read();
|
||||
let constructor_ty = pyid_to_def
|
||||
.get(&py_obj_id)
|
||||
.and_then(|def_id| {
|
||||
defs
|
||||
.iter()
|
||||
.find_map(|def| {
|
||||
if let TopLevelDef::Class {
|
||||
object_id, methods, constructor, ..
|
||||
} = &*def.read() {
|
||||
if object_id == def_id && constructor.is_some() && methods.iter().any(|(s, _, _)| s == &"__init__".into()) {
|
||||
return constructor.clone();
|
||||
}
|
||||
}
|
||||
None
|
||||
})
|
||||
});
|
||||
|
||||
if let Some(ty) = constructor_ty {
|
||||
self.pyid_to_type.write().insert(py_obj_id, ty);
|
||||
return Ok(Ok(ty))
|
||||
}
|
||||
|
||||
let (extracted_ty, inst_check) = match self.get_pyty_obj_type(
|
||||
py,
|
||||
{
|
||||
|
@ -598,7 +544,7 @@ impl InnerResolver {
|
|||
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
|
||||
if len == 0 {
|
||||
assert!(matches!(
|
||||
&*unifier.get_ty(*ty),
|
||||
&*unifier.get_ty(extracted_ty),
|
||||
TypeEnum::TVar { fields: None, range, .. }
|
||||
if range.is_empty()
|
||||
));
|
||||
|
@ -695,10 +641,7 @@ impl InnerResolver {
|
|||
if let TypeEnum::TFunc(..) = &*unifier.get_ty(field.1.0) {
|
||||
continue;
|
||||
} else {
|
||||
let field_data = match obj.getattr(&name) {
|
||||
Ok(d) => d,
|
||||
Err(e) => return Ok(Err(format!("{}", e))),
|
||||
};
|
||||
let field_data = obj.getattr(&name)?;
|
||||
let ty = match self
|
||||
.get_obj_type(py, field_data, unifier, defs, primitives)?
|
||||
{
|
||||
|
@ -739,42 +682,7 @@ impl InnerResolver {
|
|||
};
|
||||
result
|
||||
}
|
||||
_ => {
|
||||
// check integer bounds
|
||||
if unifier.unioned(extracted_ty, primitives.int32) {
|
||||
obj.extract::<i32>().map_or_else(
|
||||
|_| Ok(Err(format!("{} is not in the range of int32", obj))),
|
||||
|_| Ok(Ok(extracted_ty))
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.int64) {
|
||||
obj.extract::<i64>().map_or_else(
|
||||
|_| Ok(Err(format!("{} is not in the range of int64", obj))),
|
||||
|_| Ok(Ok(extracted_ty))
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.uint32) {
|
||||
obj.extract::<u32>().map_or_else(
|
||||
|_| Ok(Err(format!("{} is not in the range of uint32", obj))),
|
||||
|_| Ok(Ok(extracted_ty))
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.uint64) {
|
||||
obj.extract::<u64>().map_or_else(
|
||||
|_| Ok(Err(format!("{} is not in the range of uint64", obj))),
|
||||
|_| Ok(Ok(extracted_ty))
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.bool) {
|
||||
obj.extract::<bool>().map_or_else(
|
||||
|_| Ok(Err(format!("{} is not in the range of bool", obj))),
|
||||
|_| Ok(Ok(extracted_ty))
|
||||
)
|
||||
} else if unifier.unioned(extracted_ty, primitives.float) {
|
||||
obj.extract::<f64>().map_or_else(
|
||||
|_| Ok(Err(format!("{} is not in the range of float64", obj))),
|
||||
|_| Ok(Ok(extracted_ty))
|
||||
)
|
||||
} else {
|
||||
Ok(Ok(extracted_ty))
|
||||
}
|
||||
}
|
||||
_ => Ok(Ok(extracted_ty)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -784,33 +692,38 @@ impl InnerResolver {
|
|||
obj: &PyAny,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
generator: &mut dyn CodeGenerator,
|
||||
expected_ty: Type,
|
||||
) -> PyResult<Option<BasicValueEnum<'ctx>>> {
|
||||
let ty_id: u64 =
|
||||
self.helper.id_fn.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?.extract(py)?;
|
||||
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
|
||||
let val: i32 = obj.extract().unwrap();
|
||||
let val: i32 = obj.extract().map_err(|_| super::CompileError::new_err(
|
||||
format!("{} is not in the range of int32", obj)))?;
|
||||
self.id_to_primitive.write().insert(id, PrimitiveValue::I32(val));
|
||||
Ok(Some(ctx.ctx.i32_type().const_int(val as u64, false).into()))
|
||||
} else if ty_id == self.primitive_ids.int64 {
|
||||
let val: i64 = obj.extract().unwrap();
|
||||
let val: i64 = obj.extract().map_err(|_| super::CompileError::new_err(
|
||||
format!("{} is not in the range of int64", obj)))?;
|
||||
self.id_to_primitive.write().insert(id, PrimitiveValue::I64(val));
|
||||
Ok(Some(ctx.ctx.i64_type().const_int(val as u64, false).into()))
|
||||
} else if ty_id == self.primitive_ids.uint32 {
|
||||
let val: u32 = obj.extract().unwrap();
|
||||
let val: u32 = obj.extract().map_err(|_| super::CompileError::new_err(
|
||||
format!("{} is not in the range of uint32", obj)))?;
|
||||
self.id_to_primitive.write().insert(id, PrimitiveValue::U32(val));
|
||||
Ok(Some(ctx.ctx.i32_type().const_int(val as u64, false).into()))
|
||||
} else if ty_id == self.primitive_ids.uint64 {
|
||||
let val: u64 = obj.extract().unwrap();
|
||||
let val: u64 = obj.extract().map_err(|_| super::CompileError::new_err(
|
||||
format!("{} is not in the range of uint64", obj)))?;
|
||||
self.id_to_primitive.write().insert(id, PrimitiveValue::U64(val));
|
||||
Ok(Some(ctx.ctx.i64_type().const_int(val, false).into()))
|
||||
} else if ty_id == self.primitive_ids.bool {
|
||||
let val: bool = obj.extract().unwrap();
|
||||
let val: bool = obj.extract().map_err(|_| super::CompileError::new_err(
|
||||
format!("{} is not in the range of bool", obj)))?;
|
||||
self.id_to_primitive.write().insert(id, PrimitiveValue::Bool(val));
|
||||
Ok(Some(ctx.ctx.bool_type().const_int(val as u64, false).into()))
|
||||
} else if ty_id == self.primitive_ids.float || ty_id == self.primitive_ids.float64 {
|
||||
let val: f64 = obj.extract().unwrap();
|
||||
} else if ty_id == self.primitive_ids.float {
|
||||
let val: f64 = obj.extract().map_err(|_| super::CompileError::new_err(
|
||||
format!("{} is not in the range of float64", obj)))?;
|
||||
self.id_to_primitive.write().insert(id, PrimitiveValue::F64(val));
|
||||
Ok(Some(ctx.ctx.f64_type().const_float(val).into()))
|
||||
} else if ty_id == self.primitive_ids.list {
|
||||
|
@ -821,14 +734,20 @@ impl InnerResolver {
|
|||
}
|
||||
|
||||
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let elem_ty =
|
||||
if let TypeEnum::TList { ty } = ctx.unifier.get_ty_immutable(expected_ty).as_ref()
|
||||
{
|
||||
*ty
|
||||
let ty = if len == 0 {
|
||||
ctx.primitives.int32
|
||||
} else {
|
||||
unreachable!("must be list")
|
||||
self.get_list_elem_type(
|
||||
py,
|
||||
obj,
|
||||
len,
|
||||
&mut ctx.unifier,
|
||||
&ctx.top_level.definitions.read(),
|
||||
&ctx.primitives,
|
||||
)?
|
||||
.unwrap()
|
||||
};
|
||||
let ty = ctx.get_llvm_type(generator, elem_ty);
|
||||
let ty = ctx.get_llvm_type(generator, ty);
|
||||
let size_t = generator.get_size_type(ctx.ctx);
|
||||
let arr_ty = ctx
|
||||
.ctx
|
||||
|
@ -847,13 +766,8 @@ impl InnerResolver {
|
|||
|
||||
let arr: Result<Option<Vec<_>>, _> = (0..len)
|
||||
.map(|i| {
|
||||
obj
|
||||
.get_item(i)
|
||||
.and_then(|elem| self.get_obj_value(py, elem, ctx, generator, elem_ty)
|
||||
.map_err(
|
||||
|e| super::CompileError::new_err(
|
||||
format!("Error getting element {}: {}", i, e))
|
||||
))
|
||||
obj.get_item(i).and_then(|elem| self.get_obj_value(py, elem, ctx, generator).map_err(
|
||||
|e| super::CompileError::new_err(format!("Error getting element {}: {}", i, e))))
|
||||
})
|
||||
.collect();
|
||||
let arr = arr?.unwrap();
|
||||
|
@ -894,48 +808,21 @@ impl InnerResolver {
|
|||
|
||||
Ok(Some(global.as_pointer_value().into()))
|
||||
} else if ty_id == self.primitive_ids.tuple {
|
||||
if let TypeEnum::TTuple { ty } = ctx.unifier.get_ty_immutable(expected_ty).as_ref() {
|
||||
let tup_tys = ty.iter();
|
||||
let elements: &PyTuple = obj.cast_as()?;
|
||||
assert_eq!(elements.len(), tup_tys.len());
|
||||
let val: Result<Option<Vec<_>>, _> =
|
||||
elements
|
||||
.iter()
|
||||
.enumerate()
|
||||
.zip(tup_tys)
|
||||
.map(|((i, elem), ty)| self
|
||||
.get_obj_value(py, elem, ctx, generator, *ty).map_err(|e|
|
||||
super::CompileError::new_err(
|
||||
format!("Error getting element {}: {}", i, e)
|
||||
)
|
||||
)
|
||||
).collect();
|
||||
let val = val?.unwrap();
|
||||
let val = ctx.ctx.const_struct(&val, false);
|
||||
Ok(Some(val.into()))
|
||||
} else {
|
||||
unreachable!("must expect tuple type")
|
||||
}
|
||||
let elements: &PyTuple = obj.cast_as()?;
|
||||
let val: Result<Option<Vec<_>>, _> =
|
||||
elements.iter().enumerate().map(|(i, elem)| self.get_obj_value(py, elem, ctx, generator).map_err(|e|
|
||||
super::CompileError::new_err(format!("Error getting element {}: {}", i, e)))).collect();
|
||||
let val = val?.unwrap();
|
||||
let val = ctx.ctx.const_struct(&val, false);
|
||||
Ok(Some(val.into()))
|
||||
} else if ty_id == self.primitive_ids.option {
|
||||
let option_val_ty = match ctx.unifier.get_ty_immutable(expected_ty).as_ref() {
|
||||
TypeEnum::TObj { obj_id, params, .. }
|
||||
if *obj_id == ctx.primitives.option.get_obj_id(&ctx.unifier) =>
|
||||
{
|
||||
*params.iter().next().unwrap().1
|
||||
}
|
||||
_ => unreachable!("must be option type")
|
||||
};
|
||||
if id == self.primitive_ids.none {
|
||||
// for option type, just a null ptr
|
||||
Ok(Some(
|
||||
ctx.get_llvm_type(generator, option_val_ty)
|
||||
.ptr_type(AddressSpace::Generic)
|
||||
.const_null()
|
||||
.into(),
|
||||
))
|
||||
// for option type, just a null ptr, whose type needs to be casted in codegen
|
||||
// according to the type info attached in the ast
|
||||
Ok(Some(ctx.ctx.i8_type().ptr_type(AddressSpace::Generic).const_null().into()))
|
||||
} else {
|
||||
match self
|
||||
.get_obj_value(py, obj.getattr("_nac3_option").unwrap(), ctx, generator, option_val_ty)
|
||||
.get_obj_value(py, obj.getattr("_nac3_option").unwrap(), ctx, generator)
|
||||
.map_err(|e| {
|
||||
super::CompileError::new_err(format!(
|
||||
"Error getting value of Option object: {}",
|
||||
|
@ -994,8 +881,23 @@ impl InnerResolver {
|
|||
let values: Result<Option<Vec<_>>, _> = fields
|
||||
.iter()
|
||||
.map(|(name, ty, _)| {
|
||||
self.get_obj_value(py, obj.getattr(&name.to_string())?, ctx, generator, *ty)
|
||||
.map_err(|e| super::CompileError::new_err(format!("Error getting field {}: {}", name, e)))
|
||||
let v = self.get_obj_value(py, obj.getattr(&name.to_string())?, ctx, generator)
|
||||
.map_err(|e| super::CompileError::new_err(format!("Error getting field {}: {}", name, e)));
|
||||
match (v, ctx.unifier.get_ty_immutable(*ty).as_ref()) {
|
||||
(Ok(Some(v)), TypeEnum::TObj { obj_id, params, .. })
|
||||
if *obj_id == ctx.primitives.option.get_obj_id(&ctx.unifier) =>
|
||||
{
|
||||
let actual_ptr_ty = ctx
|
||||
.get_llvm_type(generator, *params.iter().next().unwrap().1)
|
||||
.ptr_type(AddressSpace::Generic);
|
||||
Ok(Some(ctx.builder.build_bitcast(
|
||||
v,
|
||||
actual_ptr_ty,
|
||||
"option_none_ptr_cast",
|
||||
)))
|
||||
}
|
||||
(v, _) => v,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
let values = values?;
|
||||
|
@ -1038,7 +940,7 @@ impl InnerResolver {
|
|||
} else if ty_id == self.primitive_ids.bool {
|
||||
let val: bool = obj.extract()?;
|
||||
Ok(SymbolValue::Bool(val))
|
||||
} else if ty_id == self.primitive_ids.float || ty_id == self.primitive_ids.float64 {
|
||||
} else if ty_id == self.primitive_ids.float {
|
||||
let val: f64 = obj.extract()?;
|
||||
Ok(SymbolValue::Double(val))
|
||||
} else if ty_id == self.primitive_ids.tuple {
|
||||
|
|
|
@ -10,7 +10,7 @@ constant-optimization = ["fold"]
fold = []

[dependencies]
lazy_static = "1.4"
parking_lot = "0.12"
string-interner = "0.14"
fxhash = "0.2"
lazy_static = "1.4.0"
parking_lot = "0.11.1"
string-interner = "0.13.0"
fxhash = "0.2.1"

@ -8,7 +8,7 @@ use parking_lot::{Mutex, MutexGuard};
use string_interner::{DefaultBackend, DefaultSymbol, StringInterner, symbol::SymbolU32};
use fxhash::FxBuildHasher;

pub type Interner = StringInterner<DefaultBackend<DefaultSymbol>, FxBuildHasher>;
pub type Interner = StringInterner<DefaultSymbol, DefaultBackend<DefaultSymbol>, FxBuildHasher>;
lazy_static! {
static ref INTERNER: Mutex<Interner> = Mutex::new(StringInterner::with_hasher(FxBuildHasher::default()));
}

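Both sides keep the same pattern, a single Fx-hashed StringInterner behind a Mutex; only the crate's type parameters differ between string-interner 0.13 and 0.14. A minimal usage sketch of the intern/resolve round trip this alias exists for, using the crate's default backend so it is independent of that type-parameter difference:

use string_interner::StringInterner;

fn main() {
    let mut interner = StringInterner::default();
    let sym = interner.get_or_intern("now_mu");
    // the same string always maps to the same symbol...
    assert_eq!(sym, interner.get_or_intern("now_mu"));
    // ...and a symbol resolves back to the original string
    assert_eq!(interner.resolve(sym), Some("now_mu"));
}
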
@ -5,17 +5,17 @@ authors = ["M-Labs"]
edition = "2018"

[dependencies]
itertools = "0.10"
crossbeam = "0.8"
parking_lot = "0.12"
rayon = "1.5"
itertools = "0.10.1"
crossbeam = "0.8.1"
parking_lot = "0.11.1"
rayon = "1.5.1"
slab = "0.4.6"
nac3parser = { path = "../nac3parser" }
lazy_static = "1.4"

[dependencies.inkwell]
git = "https://github.com/TheDan64/inkwell.git"
version = "0.1.0-beta.4"
default-features = false
features = ["llvm14-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
features = ["llvm13-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]

[dev-dependencies]
test-case = "1.2.0"

@ -10,10 +10,7 @@ use crate::{
},
symbol_resolver::{SymbolValue, ValueEnum},
toplevel::{DefinitionId, TopLevelDef},
typecheck::{
typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier},
magic_methods::{binop_name, binop_assign_name},
},
typecheck::typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier},
};
use inkwell::{
AddressSpace,

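The only change here is whether binop_name and binop_assign_name are imported from the typecheck magic_methods module. Those helpers map an AST operator to the Python magic-method name used for dispatch on user classes; a simplified stand-alone model of that mapping (the real functions cover every Operator variant):

// Simplified model of binop_name / binop_assign_name: "a op b" dispatches to
// __op__, while the augmented form "a op= b" prefers __iop__ when it exists.
#[derive(Clone, Copy)]
enum Op { Add, Sub, Mult }

fn binop_name(op: Op) -> &'static str {
    match op {
        Op::Add => "__add__",
        Op::Sub => "__sub__",
        Op::Mult => "__mul__",
    }
}

fn binop_assign_name(op: Op) -> &'static str {
    match op {
        Op::Add => "__iadd__",
        Op::Sub => "__isub__",
        Op::Mult => "__imul__",
    }
}

fn main() {
    assert_eq!(binop_name(Op::Mult), "__mul__");
    assert_eq!(binop_assign_name(Op::Add), "__iadd__");
}
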
@ -153,7 +150,11 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
};
let actual_ptr_type =
self.get_llvm_type(generator, ty).ptr_type(AddressSpace::Generic);
actual_ptr_type.const_null().into()
self.builder.build_bitcast(
self.ctx.i8_type().ptr_type(AddressSpace::Generic).const_null(),
actual_ptr_type,
"default_opt_none",
)
}
}
}

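On this branch a missing Option value is materialised as a plain i8* null and every use site bitcasts it to the pointer type it actually needs, rather than building a null of the concrete type up front. A stand-alone inkwell sketch of that cast; the function and value names are illustrative, not the nac3core codegen itself:

use inkwell::context::Context;
use inkwell::AddressSpace;

fn main() {
    let ctx = Context::create();
    let module = ctx.create_module("sketch");
    let builder = ctx.create_builder();

    let fn_ty = ctx.void_type().fn_type(&[], false);
    let func = module.add_function("demo", fn_ty, None);
    let entry = ctx.append_basic_block(func, "entry");
    builder.position_at_end(entry);

    // the untyped null produced where the Option "None" is created
    let none = ctx.i8_type().ptr_type(AddressSpace::Generic).const_null();
    // the concrete pointer type expected at the use site, e.g. i64* for Option[int64]
    let expected = ctx.i64_type().ptr_type(AddressSpace::Generic);
    let _casted = builder.build_bitcast(none, expected, "option_ptr_cast");

    builder.build_return(None);
}
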
@ -165,7 +166,6 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
) -> BasicTypeEnum<'ctx> {
get_llvm_type(
self.ctx,
&self.module,
generator,
&mut self.unifier,
self.top_level,

@ -238,7 +238,6 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {

pub fn gen_int_ops(
&mut self,
generator: &mut dyn CodeGenerator,
op: &Operator,
lhs: BasicValueEnum<'ctx>,
rhs: BasicValueEnum<'ctx>,

@ -274,7 +273,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
(Operator::RShift, _) => self.builder.build_right_shift(lhs, rhs, true, "rshift").into(),
(Operator::FloorDiv, true) => self.builder.build_int_signed_div(lhs, rhs, "floordiv").into(),
(Operator::FloorDiv, false) => self.builder.build_int_unsigned_div(lhs, rhs, "floordiv").into(),
(Operator::Pow, s) => integer_power(generator, self, lhs, rhs, s).into(),
(Operator::Pow, s) => integer_power(self, lhs, rhs, s).into(),
// special implementation?
(Operator::MatMult, _) => unreachable!(),
}

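The Pow arm dispatches integer exponentiation to an integer_power helper whose body is not part of this diff; the standard technique such a helper wraps is exponentiation by squaring, sketched here in plain Rust under that assumption (the real helper emits the equivalent loop as LLVM IR and applies its own signedness and overflow policy):

fn integer_power(mut base: i64, mut exp: u64) -> i64 {
    let mut result = 1i64;
    while exp != 0 {
        if exp & 1 == 1 {
            result = result.wrapping_mul(base); // multiply in the current bit
        }
        base = base.wrapping_mul(base); // square for the next bit
        exp >>= 1;
    }
    result
}

fn main() {
    assert_eq!(integer_power(3, 5), 243);
    assert_eq!(integer_power(2, 10), 1024);
}
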
@ -362,31 +361,17 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
|
|||
}
|
||||
}
|
||||
}
|
||||
let params = if loc_params.is_empty() { params } else { &loc_params };
|
||||
let params = fun
|
||||
.get_type()
|
||||
.get_param_types()
|
||||
.into_iter()
|
||||
.zip(params.iter())
|
||||
.map(|(ty, val)| match (ty, val.get_type()) {
|
||||
(BasicTypeEnum::PointerType(arg_ty), BasicTypeEnum::PointerType(val_ty))
|
||||
if {
|
||||
ty != val.get_type()
|
||||
&& arg_ty.get_element_type().is_struct_type()
|
||||
&& val_ty.get_element_type().is_struct_type()
|
||||
} =>
|
||||
{
|
||||
self.builder.build_bitcast(*val, arg_ty, "call_arg_cast")
|
||||
}
|
||||
_ => *val,
|
||||
})
|
||||
.collect_vec();
|
||||
let params = if loc_params.is_empty() {
|
||||
params
|
||||
} else {
|
||||
&loc_params
|
||||
};
|
||||
let result = if let Some(target) = self.unwind_target {
|
||||
let current = self.builder.get_insert_block().unwrap().get_parent().unwrap();
|
||||
let then_block = self.ctx.append_basic_block(current, &format!("after.{}", call_name));
|
||||
let result = self
|
||||
.builder
|
||||
.build_invoke(fun, ¶ms, then_block, target, call_name)
|
||||
.build_invoke(fun, params, then_block, target, call_name)
|
||||
.try_as_basic_value()
|
||||
.left();
|
||||
self.builder.position_at_end(then_block);
|
||||
|
@ -410,9 +395,9 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
self.gen_const(generator, &nac3parser::ast::Constant::Str(s.into()), self.primitives.str)
}

pub fn raise_exn(
pub fn raise_exn<G: CodeGenerator>(
&mut self,
generator: &mut dyn CodeGenerator,
generator: &mut G,
name: &str,
msg: BasicValueEnum<'ctx>,
params: [Option<IntValue<'ctx>>; 3],

@ -449,9 +434,9 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
|
|||
gen_raise(generator, self, Some(&zelf.into()), loc);
|
||||
}
|
||||
|
||||
pub fn make_assert(
|
||||
pub fn make_assert<G: CodeGenerator>(
|
||||
&mut self,
|
||||
generator: &mut dyn CodeGenerator,
|
||||
generator: &mut G,
|
||||
cond: IntValue<'ctx>,
|
||||
err_name: &str,
|
||||
err_msg: &str,
|
||||
|
@ -462,9 +447,9 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
|
|||
self.make_assert_impl(generator, cond, err_name, err_msg, params, loc)
|
||||
}
|
||||
|
||||
pub fn make_assert_impl(
|
||||
pub fn make_assert_impl<G: CodeGenerator>(
|
||||
&mut self,
|
||||
generator: &mut dyn CodeGenerator,
|
||||
generator: &mut G,
|
||||
cond: IntValue<'ctx>,
|
||||
err_name: &str,
|
||||
err_msg: BasicValueEnum<'ctx>,
|
||||
|
@ -475,7 +460,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
let i1_true = i1.const_all_ones();
let expect_fun = self.module.get_function("llvm.expect.i1").unwrap_or_else(|| {
self.module.add_function(
"llvm.expect.i1",
"llvm.expect",
i1.fn_type(&[i1.into(), i1.into()], false),
None,
)

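This hunk only changes the name under which the expectation intrinsic is declared when it is not already present. llvm.expect.i1 tells the optimizer which boolean value a condition is expected to take, so the assertion's failure path can be laid out as cold. A stand-alone inkwell sketch of declaring and calling it, separate from the surrounding make_assert_impl:

use inkwell::context::Context;

fn main() {
    let ctx = Context::create();
    let module = ctx.create_module("sketch");
    let builder = ctx.create_builder();
    let i1 = ctx.bool_type();

    // declare the intrinsic once, as the unwrap_or_else above does
    let expect = module.add_function(
        "llvm.expect.i1",
        i1.fn_type(&[i1.into(), i1.into()], false),
        None,
    );

    let fn_ty = ctx.void_type().fn_type(&[i1.into()], false);
    let func = module.add_function("demo", fn_ty, None);
    let entry = ctx.append_basic_block(func, "entry");
    builder.position_at_end(entry);

    // mark the incoming condition as expected-true before branching on it
    let cond = func.get_nth_param(0).unwrap().into_int_value();
    let _expected = builder
        .build_call(expect, &[cond.into(), i1.const_all_ones().into()], "expect")
        .try_as_basic_value()
        .unwrap_left();
    builder.build_return(None);
}
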
@ -511,7 +496,12 @@ pub fn gen_constructor<'ctx, 'a, G: CodeGenerator>(
match def {
TopLevelDef::Class { methods, .. } => {
// TODO: what about other fields that require alloca?
let fun_id = methods.iter().find(|method| method.0 == "__init__".into()).and_then(|method| Some(method.2));
let mut fun_id = None;
for (name, _, id) in methods.iter() {
if name == &"__init__".into() {
fun_id = Some(*id);
}
}
let ty = ctx.get_llvm_type(generator, signature.ret).into_pointer_type();
let zelf_ty: BasicTypeEnum = ty.get_element_type().try_into().unwrap();
let zelf: BasicValueEnum<'ctx> = ctx.builder.build_alloca(zelf_ty, "alloca").into();

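The __init__ lookup above is rewritten from a find/and_then chain into an explicit loop; both shapes select the DefinitionId of the constructor if one exists. A simplified stand-alone model of the iterator form, with the (StrRef, Type, DefinitionId) triples replaced by plain strings and usize:

fn find_init(methods: &[(&str, &str, usize)]) -> Option<usize> {
    methods
        .iter()
        .find(|(name, _, _)| *name == "__init__")
        .map(|(_, _, id)| *id)
}

fn main() {
    let methods = [("__init__", "fn(self)", 3), ("run", "fn(self)", 4)];
    assert_eq!(find_init(&methods), Some(3));
    assert_eq!(find_init(&methods[1..]), None);
}
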
@ -652,14 +642,14 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator>(
|
|||
}
|
||||
// reorder the parameters
|
||||
let mut real_params =
|
||||
fun.0.args.iter().map(|arg| (mapping.remove(&arg.name).unwrap(), arg.ty)).collect_vec();
|
||||
fun.0.args.iter().map(|arg| mapping.remove(&arg.name).unwrap()).collect_vec();
|
||||
if let Some(obj) = &obj {
|
||||
real_params.insert(0, (obj.1.clone(), obj.0));
|
||||
real_params.insert(0, obj.1.clone());
|
||||
}
|
||||
let static_params = real_params
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(i, (v, _))| {
|
||||
.filter_map(|(i, v)| {
|
||||
if let ValueEnum::Static(s) = v {
|
||||
Some((i, s.clone()))
|
||||
} else {
|
||||
|
@ -691,7 +681,7 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator>(
|
|||
};
|
||||
param_vals = real_params
|
||||
.into_iter()
|
||||
.map(|(p, t)| p.to_basic_value_enum(ctx, generator, t))
|
||||
.map(|p| p.to_basic_value_enum(ctx, generator))
|
||||
.collect::<Result<Vec<_>, String>>()?;
|
||||
instance_to_symbol.get(&key).cloned().ok_or_else(|| "".into())
|
||||
}
|
||||
|
@ -804,7 +794,7 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||
let cont_bb = ctx.ctx.append_basic_block(current, "cont");
|
||||
|
||||
let Comprehension { target, iter, ifs, .. } = &generators[0];
|
||||
let iter_val = generator.gen_expr(ctx, iter)?.unwrap().to_basic_value_enum(ctx, generator, iter.custom.unwrap())?;
|
||||
let iter_val = generator.gen_expr(ctx, iter)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
let int32 = ctx.ctx.i32_type();
|
||||
let size_t = generator.get_size_type(ctx.ctx);
|
||||
let zero_size_t = size_t.const_zero();
|
||||
|
@ -909,7 +899,7 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||
let result = generator
|
||||
.gen_expr(ctx, cond)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, cond.custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_int_value();
|
||||
let succ = ctx.ctx.append_basic_block(current, "then");
|
||||
ctx.builder.build_conditional_branch(result, succ, test_bb);
|
||||
|
@ -918,7 +908,7 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
|
|||
let elem = generator.gen_expr(ctx, elt)?.unwrap();
|
||||
let i = ctx.builder.build_load(index, "i").into_int_value();
|
||||
let elem_ptr = unsafe { ctx.builder.build_gep(list_content, &[i], "elem_ptr") };
|
||||
let val = elem.to_basic_value_enum(ctx, generator, elt.custom.unwrap())?;
|
||||
let val = elem.to_basic_value_enum(ctx, generator)?;
|
||||
ctx.builder.build_store(elem_ptr, val);
|
||||
ctx.builder
|
||||
.build_store(index, ctx.builder.build_int_add(i, size_t.const_int(1, false), "inc"));
|
||||
|
@ -940,94 +930,41 @@ pub fn gen_binop_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
left: &Expr<Option<Type>>,
|
||||
op: &Operator,
|
||||
right: &Expr<Option<Type>>,
|
||||
loc: Location,
|
||||
is_aug_assign: bool,
|
||||
) -> Result<Option<ValueEnum<'ctx>>, String> {
|
||||
) -> Result<ValueEnum<'ctx>, String> {
|
||||
let ty1 = ctx.unifier.get_representative(left.custom.unwrap());
|
||||
let ty2 = ctx.unifier.get_representative(right.custom.unwrap());
|
||||
let left_val = generator
|
||||
.gen_expr(ctx, left)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, left.custom.unwrap())?;
|
||||
let right_val = generator
|
||||
.gen_expr(ctx, right)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, right.custom.unwrap())?;
|
||||
let left = generator.gen_expr(ctx, left)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
let right = generator.gen_expr(ctx, right)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
|
||||
// we can directly compare the types, because we've got their representatives
|
||||
// which would be unchanged until further unification, which we would never do
|
||||
// when doing code generation for function instances
|
||||
if ty1 == ty2 && [ctx.primitives.int32, ctx.primitives.int64].contains(&ty1) {
|
||||
Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, true).into()))
|
||||
Ok(if ty1 == ty2 && [ctx.primitives.int32, ctx.primitives.int64].contains(&ty1) {
|
||||
ctx.gen_int_ops(op, left, right, true)
|
||||
} else if ty1 == ty2 && [ctx.primitives.uint32, ctx.primitives.uint64].contains(&ty1) {
|
||||
Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, false).into()))
|
||||
ctx.gen_int_ops(op, left, right, false)
|
||||
} else if ty1 == ty2 && ctx.primitives.float == ty1 {
|
||||
Ok(Some(ctx.gen_float_ops(op, left_val, right_val).into()))
|
||||
ctx.gen_float_ops(op, left, right)
|
||||
} else if ty1 == ctx.primitives.float && ty2 == ctx.primitives.int32 {
|
||||
// Pow is the only operator that would pass typecheck between float and int
|
||||
assert!(*op == Operator::Pow);
|
||||
let i32_t = ctx.ctx.i32_type();
|
||||
let pow_intr = ctx.module.get_function("llvm.powi.f64.i32").unwrap_or_else(|| {
|
||||
// TODO: throw exception when rhs is out of i16 bound
|
||||
// since llvm intrinsic only support to i16 for f64
|
||||
let i16_t = ctx.ctx.i16_type();
|
||||
let pow_intr = ctx.module.get_function("llvm.powi.f64.i16").unwrap_or_else(|| {
|
||||
let f64_t = ctx.ctx.f64_type();
|
||||
let ty = f64_t.fn_type(&[f64_t.into(), i32_t.into()], false);
|
||||
ctx.module.add_function("llvm.powi.f64.i32", ty, None)
|
||||
let ty = f64_t.fn_type(&[f64_t.into(), i16_t.into()], false);
|
||||
ctx.module.add_function("llvm.powi.f64.i16", ty, None)
|
||||
});
|
||||
let res = ctx.builder
|
||||
.build_call(pow_intr, &[left_val.into(), right_val.into()], "f_pow_i")
|
||||
let right = ctx.builder.build_int_truncate(right.into_int_value(), i16_t, "r_pow");
|
||||
ctx.builder
|
||||
.build_call(pow_intr, &[left.into(), right.into()], "f_pow_i")
|
||||
.try_as_basic_value()
|
||||
.unwrap_left();
|
||||
Ok(Some(res.into()))
|
||||
.unwrap_left()
|
||||
} else {
|
||||
let (op_name, id) = if let TypeEnum::TObj { fields, obj_id, .. } =
|
||||
ctx.unifier.get_ty_immutable(left.custom.unwrap()).as_ref()
|
||||
{
|
||||
let (binop_name, binop_assign_name) = (
|
||||
binop_name(op).into(),
|
||||
binop_assign_name(op).into()
|
||||
);
|
||||
// if is aug_assign, try aug_assign operator first
|
||||
if is_aug_assign && fields.contains_key(&binop_assign_name) {
|
||||
(binop_assign_name, *obj_id)
|
||||
} else {
|
||||
(binop_name, *obj_id)
|
||||
}
|
||||
} else {
|
||||
unreachable!("must be tobj")
|
||||
};
|
||||
let signature = match ctx.calls.get(&loc.into()) {
|
||||
Some(call) => ctx.unifier.get_call_signature(*call).unwrap(),
|
||||
None => {
|
||||
if let TypeEnum::TObj { fields, .. } =
|
||||
ctx.unifier.get_ty_immutable(left.custom.unwrap()).as_ref()
|
||||
{
|
||||
let fn_ty = fields.get(&op_name).unwrap().0;
|
||||
if let TypeEnum::TFunc(sig) = ctx.unifier.get_ty_immutable(fn_ty).as_ref() {
|
||||
sig.clone()
|
||||
} else {
|
||||
unreachable!("must be func sig")
|
||||
}
|
||||
} else {
|
||||
unreachable!("must be tobj")
|
||||
}
|
||||
},
|
||||
};
|
||||
let fun_id = {
|
||||
let defs = ctx.top_level.definitions.read();
|
||||
let obj_def = defs.get(id.0).unwrap().read();
|
||||
if let TopLevelDef::Class { methods, .. } = &*obj_def {
|
||||
methods.iter().find(|method| method.0 == op_name).unwrap().2
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
};
|
||||
generator
|
||||
.gen_call(
|
||||
ctx,
|
||||
Some((left.custom.unwrap(), left_val.into())),
|
||||
(&signature, fun_id),
|
||||
vec![(None, right_val.into())],
|
||||
).map(|f| f.map(|f| f.into()))
|
||||
unimplemented!()
|
||||
}
|
||||
.into())
|
||||
}
|
||||
|
||||
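The hunk above swaps the `llvm.powi.f64.i32` intrinsic for `llvm.powi.f64.i16` and truncates the exponent first, since the f64 `powi` form only takes an i16 exponent here (the TODO notes the missing range check). A minimal sketch of that declare-or-reuse-and-call pattern, using only the inkwell calls that appear in the hunk; the wrapper function name is illustrative:

```rust
use inkwell::{
    builder::Builder,
    context::Context,
    module::Module,
    values::{FloatValue, IntValue},
};

/// Sketch: emit `base ** exp` for f64 ** i32 through `llvm.powi.f64.i16`.
/// The i32 exponent is truncated to i16; a full implementation would also
/// guard against exponents outside the i16 range.
fn gen_f64_powi<'ctx>(
    ctx: &'ctx Context,
    module: &Module<'ctx>,
    builder: &Builder<'ctx>,
    base: FloatValue<'ctx>,
    exp: IntValue<'ctx>,
) -> FloatValue<'ctx> {
    let f64_t = ctx.f64_type();
    let i16_t = ctx.i16_type();
    // Declare the intrinsic once per module and reuse it afterwards.
    let pow_intr = module.get_function("llvm.powi.f64.i16").unwrap_or_else(|| {
        let fn_ty = f64_t.fn_type(&[f64_t.into(), i16_t.into()], false);
        module.add_function("llvm.powi.f64.i16", fn_ty, None)
    });
    let exp16 = builder.build_int_truncate(exp, i16_t, "r_pow");
    builder
        .build_call(pow_intr, &[base.into(), exp16.into()], "f_pow_i")
        .try_as_basic_value()
        .unwrap_left()
        .into_float_value()
}
```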
pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
||||
|
@ -1035,19 +972,8 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
expr: &Expr<Option<Type>>,
|
||||
) -> Result<Option<ValueEnum<'ctx>>, String> {
|
||||
ctx.current_loc = expr.location;
|
||||
let int32 = ctx.ctx.i32_type();
|
||||
let zero = int32.const_int(0, false);
|
||||
|
||||
let loc = ctx.debug_info.0.create_debug_location(
|
||||
ctx.ctx,
|
||||
ctx.current_loc.row as u32,
|
||||
ctx.current_loc.column as u32,
|
||||
ctx.debug_info.2,
|
||||
None,
|
||||
);
|
||||
ctx.builder.set_current_debug_location(ctx.ctx, loc);
|
||||
|
||||
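The prologue added to `gen_expr` (and, further below, `gen_stmt`) stamps every emitted instruction with the source location of the current AST node via the per-function debug-info scope. A small sketch of that pattern against plain inkwell types; the wrapper name and parameter list are illustrative, the two calls mirror the hunk:

```rust
use inkwell::{
    builder::Builder,
    context::Context,
    debug_info::{DIScope, DebugInfoBuilder},
};

/// Sketch: set the current debug location before emitting code for a node,
/// as done with `ctx.debug_info.0` (DebugInfoBuilder) and `ctx.debug_info.2`
/// (the enclosing function's DIScope) in the hunk above.
fn set_debug_location<'ctx>(
    context: &'ctx Context,
    builder: &Builder<'ctx>,
    dibuilder: &DebugInfoBuilder<'ctx>,
    scope: DIScope<'ctx>,
    row: u32,
    col: u32,
) {
    let loc = dibuilder.create_debug_location(context, row, col, scope, /* inlined_at */ None);
    builder.set_current_debug_location(context, loc);
}
```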
Ok(Some(match &expr.node {
|
||||
ExprKind::Constant { value, .. } => {
|
||||
let ty = expr.custom.unwrap();
|
||||
|
@ -1074,7 +1000,25 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
Some((_, Some(static_value), _)) => ValueEnum::Static(static_value.clone()),
|
||||
None => {
|
||||
let resolver = ctx.resolver.clone();
|
||||
resolver.get_symbol_value(*id, ctx).unwrap()
|
||||
let val = resolver.get_symbol_value(*id, ctx).unwrap();
|
||||
// if is option, need to cast pointer to handle None
|
||||
match &*ctx.unifier.get_ty(expr.custom.unwrap()) {
|
||||
TypeEnum::TObj { obj_id, params, .. }
|
||||
if *obj_id == ctx.primitives.option.get_obj_id(&ctx.unifier) =>
|
||||
{
|
||||
if let BasicValueEnum::PointerValue(ptr) = val.to_basic_value_enum(ctx, generator)? {
|
||||
let actual_ptr_ty = ctx.get_llvm_type(
|
||||
generator,
|
||||
*params.iter().next().unwrap().1,
|
||||
)
|
||||
.ptr_type(AddressSpace::Generic);
|
||||
ctx.builder.build_bitcast(ptr, actual_ptr_ty, "option_ptr_cast").into()
|
||||
} else {
|
||||
unreachable!("option obj must be ptr")
|
||||
}
|
||||
}
|
||||
_ => val,
|
||||
}
|
||||
}
|
||||
},
|
||||
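The new `ExprKind::Name` branch above bitcasts the pointer behind an `Option` symbol to a pointer of its concrete type parameter, so a generic `None` value can be used where `Option[T]` is expected. A rough sketch of just the cast, assuming `actual_ptr_ty` was derived from the option's single type parameter as in the hunk; the helper name is illustrative:

```rust
use inkwell::{builder::Builder, types::PointerType, values::BasicValueEnum};

/// Sketch: re-type an Option value's pointer to the payload pointer type.
fn cast_option_ptr<'ctx>(
    builder: &Builder<'ctx>,
    val: BasicValueEnum<'ctx>,
    actual_ptr_ty: PointerType<'ctx>,
) -> BasicValueEnum<'ctx> {
    if let BasicValueEnum::PointerValue(ptr) = val {
        // Pointer-to-pointer bitcast; the pointee layout is unchanged.
        builder.build_bitcast(ptr, actual_ptr_ty, "option_ptr_cast")
    } else {
        unreachable!("option obj must be ptr")
    }
}
```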
ExprKind::List { elts, .. } => {
|
||||
|
@ -1085,10 +1029,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
.map(|x| {
|
||||
generator
|
||||
.gen_expr(ctx, x)
|
||||
.map_or_else(
|
||||
Err,
|
||||
|v| v.unwrap().to_basic_value_enum(ctx, generator, x.custom.unwrap())
|
||||
)
|
||||
.map_or_else(Err, |v| v.unwrap().to_basic_value_enum(ctx, generator))
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let ty = if elements.is_empty() {
|
||||
|
@ -1121,7 +1062,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
.map(|x| {
|
||||
generator
|
||||
.gen_expr(ctx, x)
|
||||
.map_or_else(Err, |v| v.unwrap().to_basic_value_enum(ctx, generator, x.custom.unwrap()))
|
||||
.map_or_else(Err, |v| v.unwrap().to_basic_value_enum(ctx, generator))
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let element_ty = element_val.iter().map(BasicValueEnum::get_type).collect_vec();
|
||||
|
@ -1143,7 +1084,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
// note that we would handle class methods directly in calls
|
||||
match generator.gen_expr(ctx, value)?.unwrap() {
|
||||
ValueEnum::Static(v) => v.get_field(*attr, ctx).map_or_else(|| {
|
||||
let v = v.to_basic_value_enum(ctx, generator, value.custom.unwrap())?;
|
||||
let v = v.to_basic_value_enum(ctx, generator)?;
|
||||
let index = ctx.get_attr_index(value.custom.unwrap(), *attr);
|
||||
Ok(ValueEnum::Dynamic(ctx.build_gep_and_load(
|
||||
v.into_pointer_value(),
|
||||
|
@ -1164,7 +1105,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
let left = generator
|
||||
.gen_expr(ctx, &values[0])?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, values[0].custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_int_value();
|
||||
let current = ctx.builder.get_insert_block().unwrap().get_parent().unwrap();
|
||||
let a_bb = ctx.ctx.append_basic_block(current, "a");
|
||||
|
@ -1180,7 +1121,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
let b = generator
|
||||
.gen_expr(ctx, &values[1])?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, values[1].custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_int_value();
|
||||
ctx.builder.build_unconditional_branch(cont_bb);
|
||||
(a, b)
|
||||
|
@ -1190,7 +1131,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
let a = generator
|
||||
.gen_expr(ctx, &values[1])?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, values[1].custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_int_value();
|
||||
ctx.builder.build_unconditional_branch(cont_bb);
|
||||
ctx.builder.position_at_end(b_bb);
|
||||
|
@ -1204,15 +1145,11 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
phi.add_incoming(&[(&a, a_bb), (&b, b_bb)]);
|
||||
phi.as_basic_value().into()
|
||||
}
|
||||
ExprKind::BinOp { op, left, right } => {
|
||||
return gen_binop_expr(generator, ctx, left, op, right, expr.location, false);
|
||||
}
|
||||
ExprKind::BinOp { op, left, right } => gen_binop_expr(generator, ctx, left, op, right)?,
|
||||
ExprKind::UnaryOp { op, operand } => {
|
||||
let ty = ctx.unifier.get_representative(operand.custom.unwrap());
|
||||
let val =
|
||||
generator.gen_expr(ctx, operand)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, operand.custom.unwrap())?;
|
||||
generator.gen_expr(ctx, operand)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
if ty == ctx.primitives.bool {
|
||||
let val = val.into_int_value();
|
||||
match op {
|
||||
|
@ -1272,11 +1209,11 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
generator
|
||||
.gen_expr(ctx, lhs)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, lhs.custom.unwrap())?,
|
||||
.to_basic_value_enum(ctx, generator)?,
|
||||
generator
|
||||
.gen_expr(ctx, rhs)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, rhs.custom.unwrap())?,
|
||||
.to_basic_value_enum(ctx, generator)?,
|
||||
) {
|
||||
(lhs, rhs)
|
||||
} else {
|
||||
|
@ -1300,11 +1237,11 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
generator
|
||||
.gen_expr(ctx, lhs)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, lhs.custom.unwrap())?,
|
||||
.to_basic_value_enum(ctx, generator)?,
|
||||
generator
|
||||
.gen_expr(ctx, rhs)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, rhs.custom.unwrap())?,
|
||||
.to_basic_value_enum(ctx, generator)?,
|
||||
) {
|
||||
(lhs, rhs)
|
||||
} else {
|
||||
|
@ -1332,7 +1269,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
let test = generator
|
||||
.gen_expr(ctx, test)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, test.custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_int_value();
|
||||
let body_ty = body.custom.unwrap();
|
||||
let is_none = ctx.unifier.get_representative(body_ty) == ctx.primitives.none;
|
||||
|
@ -1352,7 +1289,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
match result {
|
||||
None => None,
|
||||
Some(v) => {
|
||||
let a = a.unwrap().to_basic_value_enum(ctx, generator, body.custom.unwrap())?;
|
||||
let a = a.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
Some(ctx.builder.build_store(v, a))
|
||||
}
|
||||
};
|
||||
|
@ -1362,7 +1299,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
match result {
|
||||
None => None,
|
||||
Some(v) => {
|
||||
let b = b.unwrap().to_basic_value_enum(ctx, generator, orelse.custom.unwrap())?;
|
||||
let b = b.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
Some(ctx.builder.build_store(v, b))
|
||||
}
|
||||
};
|
||||
|
@ -1423,72 +1360,35 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
let defs = ctx.top_level.definitions.read();
|
||||
let obj_def = defs.get(id.0).unwrap().read();
|
||||
if let TopLevelDef::Class { methods, .. } = &*obj_def {
|
||||
methods.iter().find(|method| method.0 == *attr).unwrap().2
|
||||
let mut fun_id = None;
|
||||
for (name, _, id) in methods.iter() {
|
||||
if name == attr {
|
||||
fun_id = Some(*id);
|
||||
}
|
||||
}
|
||||
fun_id.unwrap()
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
};
|
||||
// directly generate code for option.unwrap
|
||||
// since it needs to return static value to optimize for kernel invariant
|
||||
// since it needs location information from ast
|
||||
if attr == &"unwrap".into()
|
||||
&& id == ctx.primitives.option.get_obj_id(&ctx.unifier)
|
||||
{
|
||||
match val {
|
||||
ValueEnum::Static(v) => match v.get_field("_nac3_option".into(), ctx) {
|
||||
// if is none, raise exception directly
|
||||
None => {
|
||||
let err_msg = ctx.gen_string(generator, "");
|
||||
let current_fun = ctx
|
||||
.builder
|
||||
.get_insert_block()
|
||||
.unwrap()
|
||||
.get_parent()
|
||||
.unwrap();
|
||||
let unreachable_block = ctx.ctx.append_basic_block(
|
||||
current_fun,
|
||||
"unwrap_none_unreachable"
|
||||
);
|
||||
let exn_block = ctx.ctx.append_basic_block(
|
||||
current_fun,
|
||||
"unwrap_none_exception"
|
||||
);
|
||||
ctx.builder.build_unconditional_branch(exn_block);
|
||||
ctx.builder.position_at_end(exn_block);
|
||||
ctx.raise_exn(
|
||||
generator,
|
||||
"0:UnwrapNoneError",
|
||||
err_msg,
|
||||
[None, None, None],
|
||||
ctx.current_loc
|
||||
);
|
||||
ctx.builder.position_at_end(unreachable_block);
|
||||
let ptr = ctx
|
||||
.get_llvm_type(generator, value.custom.unwrap())
|
||||
.into_pointer_type()
|
||||
.const_null();
|
||||
return Ok(Some(ctx.builder.build_load(
|
||||
ptr,
|
||||
"unwrap_none_unreachable_load"
|
||||
).into()));
|
||||
}
|
||||
Some(v) => return Ok(Some(v)),
|
||||
}
|
||||
ValueEnum::Dynamic(BasicValueEnum::PointerValue(ptr)) => {
|
||||
let not_null = ctx.builder.build_is_not_null(ptr, "unwrap_not_null");
|
||||
ctx.make_assert(
|
||||
generator,
|
||||
not_null,
|
||||
"0:UnwrapNoneError",
|
||||
"",
|
||||
[None, None, None],
|
||||
expr.location,
|
||||
);
|
||||
return Ok(Some(ctx.builder.build_load(
|
||||
ptr,
|
||||
"unwrap_some_load"
|
||||
).into()))
|
||||
}
|
||||
_ => unreachable!("option must be static or ptr")
|
||||
if let BasicValueEnum::PointerValue(ptr) = val.to_basic_value_enum(ctx, generator)? {
|
||||
let not_null = ctx.builder.build_is_not_null(ptr, "unwrap_not_null");
|
||||
ctx.make_assert(
|
||||
generator,
|
||||
not_null,
|
||||
"0:UnwrapNoneError",
|
||||
"",
|
||||
[None, None, None],
|
||||
expr.location,
|
||||
);
|
||||
return Ok(Some(ctx.builder.build_load(ptr, "unwrap_some").into()))
|
||||
} else {
|
||||
unreachable!("option must be ptr")
|
||||
}
|
||||
}
|
||||
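Both sides of the `option.unwrap()` hunk above reduce to the same dynamic-value shape: `None` is a null pointer, so unwrapping is a not-null assertion followed by a load of the payload. A sketch of that shape; `CodeGenContext`, `CodeGenerator` and `make_assert` are this crate's own items (import paths assumed), the three calls mirror the hunk:

```rust
use inkwell::values::{BasicValueEnum, PointerValue};
use nac3parser::ast::Location;

/// Sketch: unwrap an Option represented as a (possibly null) pointer;
/// `make_assert` raises a runtime "0:UnwrapNoneError" when the check fails.
fn unwrap_option_ptr<'ctx, 'a>(
    generator: &mut dyn CodeGenerator,
    ctx: &mut CodeGenContext<'ctx, 'a>,
    ptr: PointerValue<'ctx>,
    loc: Location,
) -> BasicValueEnum<'ctx> {
    let not_null = ctx.builder.build_is_not_null(ptr, "unwrap_not_null");
    ctx.make_assert(generator, not_null, "0:UnwrapNoneError", "", [None, None, None], loc);
    ctx.builder.build_load(ptr, "unwrap_some")
}
```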
return Ok(generator
|
||||
|
@ -1508,7 +1408,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
let v = generator
|
||||
.gen_expr(ctx, value)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, value.custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_pointer_value();
|
||||
let ty = ctx.get_llvm_type(generator, *ty);
|
||||
let arr_ptr = ctx.build_gep_and_load(v, &[zero, zero]).into_pointer_value();
|
||||
|
@ -1517,7 +1417,6 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
let (start, end, step) =
|
||||
handle_slice_indices(lower, upper, step, ctx, generator, v)?;
|
||||
let length = calculate_len_for_slice_range(
|
||||
generator,
|
||||
ctx,
|
||||
start,
|
||||
ctx.builder
|
||||
|
@ -1539,8 +1438,8 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
let res_ind =
|
||||
handle_slice_indices(&None, &None, &None, ctx, generator, res_array_ret)?;
|
||||
list_slice_assignment(
|
||||
generator,
|
||||
ctx,
|
||||
generator.get_size_type(ctx.ctx),
|
||||
ty,
|
||||
res_array_ret,
|
||||
res_ind,
|
||||
|
@ -1555,7 +1454,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
let raw_index = generator
|
||||
.gen_expr(ctx, slice)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, slice.custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_int_value();
|
||||
let raw_index = ctx.builder.build_int_s_extend(
|
||||
raw_index,
|
||||
|
@ -1590,39 +1489,26 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
|
|||
[Some(raw_index), Some(len), None],
|
||||
expr.location,
|
||||
);
|
||||
ctx.build_gep_and_load(arr_ptr, &[index]).into()
|
||||
ctx.build_gep_and_load(arr_ptr, &[index])
|
||||
}
|
||||
} else if let TypeEnum::TTuple { .. } = &*ctx.unifier.get_ty(value.custom.unwrap()) {
|
||||
let v = generator
|
||||
.gen_expr(ctx, value)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_struct_value();
|
||||
let index: u32 =
|
||||
if let ExprKind::Constant { value: ast::Constant::Int(v), .. } = &slice.node {
|
||||
(*v).try_into().unwrap()
|
||||
} else {
|
||||
unreachable!("tuple subscript must be const int after type check");
|
||||
};
|
||||
let v = generator
|
||||
.gen_expr(ctx, value)?
|
||||
.unwrap();
|
||||
match v {
|
||||
ValueEnum::Dynamic(v) => {
|
||||
let v = v.into_struct_value();
|
||||
ctx.builder.build_extract_value(v, index, "tup_elem").unwrap().into()
|
||||
}
|
||||
ValueEnum::Static(v) => {
|
||||
match v.get_tuple_element(index) {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
let tup = v
|
||||
.to_basic_value_enum(ctx, generator, value.custom.unwrap())?
|
||||
.into_struct_value();
|
||||
ctx.builder.build_extract_value(tup, index, "tup_elem").unwrap().into()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ctx.builder.build_extract_value(v, index, "tup_elem").unwrap()
|
||||
} else {
|
||||
unreachable!("should not be other subscriptable types after type check");
|
||||
}
|
||||
},
|
||||
}
|
||||
.into(),
|
||||
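In the tuple-subscript hunk above, the index is a compile-time constant after type checking, so a dynamic tuple (an LLVM struct value) is indexed with a single `extract_value`; the static path additionally asks the `ValueEnum::Static` wrapper for the element before falling back to this. A sketch of the dynamic path only:

```rust
use inkwell::{
    builder::Builder,
    values::{BasicValueEnum, StructValue},
};

/// Sketch: read element `index` from a tuple lowered as an LLVM struct value.
fn tuple_element<'ctx>(
    builder: &Builder<'ctx>,
    tup: StructValue<'ctx>,
    index: u32,
) -> BasicValueEnum<'ctx> {
    builder.build_extract_value(tup, index, "tup_elem").unwrap()
}
```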
ExprKind::ListComp { .. } => gen_comprehension(generator, ctx, expr)?.into(),
|
||||
_ => unimplemented!(),
|
||||
}))
|
||||
|

@ -36,7 +36,7 @@ pub trait CodeGenerator {
|
|||
}
|
||||
|
||||
/// Generate object constructor and returns the constructed object.
|
||||
/// - signature: Function signature of the constructor.
|
||||
/// - signature: Function signature of the contructor.
|
||||
/// - def: Class definition for the constructor class.
|
||||
/// - params: Function parameters.
|
||||
fn gen_constructor<'ctx, 'a>(
|
||||
@ -6,7 +6,7 @@ use inkwell::{
|
|||
context::Context,
|
||||
memory_buffer::MemoryBuffer,
|
||||
module::Module,
|
||||
types::BasicTypeEnum,
|
||||
types::{BasicTypeEnum, IntType},
|
||||
values::{IntValue, PointerValue},
|
||||
AddressSpace, IntPredicate,
|
||||
};
|
||||
|
@ -34,7 +34,6 @@ pub fn load_irrt(ctx: &Context) -> Module {
|
|||
// repeated squaring method adapted from GNU Scientific Library:
|
||||
// https://git.savannah.gnu.org/cgit/gsl.git/tree/sys/pow_int.c
|
||||
pub fn integer_power<'ctx, 'a>(
|
||||
generator: &mut dyn CodeGenerator,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
base: IntValue<'ctx>,
|
||||
exp: IntValue<'ctx>,
|
||||
|
@ -52,21 +51,7 @@ pub fn integer_power<'ctx, 'a>(
|
|||
let fn_type = base_type.fn_type(&[base_type.into(), base_type.into()], false);
|
||||
ctx.module.add_function(symbol, fn_type, None)
|
||||
});
|
||||
// throw exception when exp < 0
|
||||
let ge_zero = ctx.builder.build_int_compare(
|
||||
IntPredicate::SGE,
|
||||
exp,
|
||||
exp.get_type().const_zero(),
|
||||
"assert_int_pow_ge_0",
|
||||
);
|
||||
ctx.make_assert(
|
||||
generator,
|
||||
ge_zero,
|
||||
"0:ValueError",
|
||||
"integer power must be positive or zero",
|
||||
[None, None, None],
|
||||
ctx.current_loc,
|
||||
);
|
||||
// TODO: throw exception when exp < 0
|
||||
ctx.builder
|
||||
.build_call(pow_fun, &[base.into(), exp.into()], "call_int_pow")
|
||||
.try_as_basic_value()
|
||||
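One side of the `integer_power` hunk above guards the repeated-squaring helper with a runtime check that the exponent is non-negative; the other side leaves it as a TODO. A sketch of the guard, mirroring the calls in the hunk (`make_assert` and `current_loc` are the crate's own items, paths assumed):

```rust
use inkwell::{values::IntValue, IntPredicate};

/// Sketch: assert `exp >= 0` before emitting the integer power call,
/// raising "0:ValueError" at runtime otherwise.
fn check_exponent<'ctx, 'a>(
    generator: &mut dyn CodeGenerator,
    ctx: &mut CodeGenContext<'ctx, 'a>,
    exp: IntValue<'ctx>,
) {
    let ge_zero = ctx.builder.build_int_compare(
        IntPredicate::SGE,
        exp,
        exp.get_type().const_zero(),
        "assert_int_pow_ge_0",
    );
    ctx.make_assert(
        generator,
        ge_zero,
        "0:ValueError",
        "integer power must be positive or zero",
        [None, None, None],
        ctx.current_loc,
    );
}
```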
|
@ -75,7 +60,6 @@ pub fn integer_power<'ctx, 'a>(
|
|||
}
|
||||
|
||||
pub fn calculate_len_for_slice_range<'ctx, 'a>(
|
||||
generator: &mut dyn CodeGenerator,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
start: IntValue<'ctx>,
|
||||
end: IntValue<'ctx>,
|
||||
|
@ -88,21 +72,7 @@ pub fn calculate_len_for_slice_range<'ctx, 'a>(
|
|||
ctx.module.add_function(SYMBOL, fn_t, None)
|
||||
});
|
||||
|
||||
// assert step != 0, throw exception if not
|
||||
let not_zero = ctx.builder.build_int_compare(
|
||||
IntPredicate::NE,
|
||||
step,
|
||||
step.get_type().const_zero(),
|
||||
"range_step_ne",
|
||||
);
|
||||
ctx.make_assert(
|
||||
generator,
|
||||
not_zero,
|
||||
"0:ValueError",
|
||||
"step must not be zero",
|
||||
[None, None, None],
|
||||
ctx.current_loc,
|
||||
);
|
||||
// TODO: assert step != 0, throw exception if not
|
||||
ctx.builder
|
||||
.build_call(len_func, &[start.into(), end.into(), step.into()], "calc_len")
|
||||
.try_as_basic_value()
|
||||
|
@ -159,6 +129,7 @@ pub fn handle_slice_indices<'a, 'ctx, G: CodeGenerator>(
|
|||
generator: &mut G,
|
||||
list: PointerValue<'ctx>,
|
||||
) -> Result<(IntValue<'ctx>, IntValue<'ctx>, IntValue<'ctx>), String> {
|
||||
// TODO: throw exception when step is 0
|
||||
let int32 = ctx.ctx.i32_type();
|
||||
let zero = int32.const_zero();
|
||||
let one = int32.const_int(1, false);
|
||||
|
@ -183,23 +154,8 @@ pub fn handle_slice_indices<'a, 'ctx, G: CodeGenerator>(
|
|||
let step = generator
|
||||
.gen_expr(ctx, step)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, ctx.primitives.int32)?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_int_value();
|
||||
// assert step != 0, throw exception if not
|
||||
let not_zero = ctx.builder.build_int_compare(
|
||||
IntPredicate::NE,
|
||||
step,
|
||||
step.get_type().const_zero(),
|
||||
"range_step_ne",
|
||||
);
|
||||
ctx.make_assert(
|
||||
generator,
|
||||
not_zero,
|
||||
"0:ValueError",
|
||||
"slice step cannot be zero",
|
||||
[None, None, None],
|
||||
ctx.current_loc,
|
||||
);
|
||||
let len_id = ctx.builder.build_int_sub(length, one, "lenmin1");
|
||||
let neg = ctx.builder.build_int_compare(IntPredicate::SLT, step, zero, "step_is_neg");
|
||||
(
|
||||
|
@ -261,7 +217,7 @@ pub fn handle_slice_index_bound<'a, 'ctx, G: CodeGenerator>(
|
|||
ctx.module.add_function(SYMBOL, fn_t, None)
|
||||
});
|
||||
|
||||
let i = generator.gen_expr(ctx, i)?.unwrap().to_basic_value_enum(ctx, generator, i.custom.unwrap())?;
|
||||
let i = generator.gen_expr(ctx, i)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
Ok(ctx
|
||||
.builder
|
||||
.build_call(func, &[i.into(), length.into()], "bounded_ind")
|
||||
|
@ -275,15 +231,14 @@ pub fn handle_slice_index_bound<'a, 'ctx, G: CodeGenerator>(
|
|||
/// Order of tuples assign_idx and value_idx is ('start', 'end', 'step').
|
||||
/// Negative index should be handled before entering this function
|
||||
pub fn list_slice_assignment<'ctx, 'a>(
|
||||
generator: &mut dyn CodeGenerator,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
size_ty: IntType<'ctx>,
|
||||
ty: BasicTypeEnum<'ctx>,
|
||||
dest_arr: PointerValue<'ctx>,
|
||||
dest_idx: (IntValue<'ctx>, IntValue<'ctx>, IntValue<'ctx>),
|
||||
src_arr: PointerValue<'ctx>,
|
||||
src_idx: (IntValue<'ctx>, IntValue<'ctx>, IntValue<'ctx>),
|
||||
) {
|
||||
let size_ty = generator.get_size_type(ctx.ctx);
|
||||
let int8_ptr = ctx.ctx.i8_type().ptr_type(AddressSpace::Generic);
|
||||
let int32 = ctx.ctx.i32_type();
|
||||
let (fun_symbol, elem_ptr_type) = ("__nac3_list_slice_assign_var_size", int8_ptr);
|
||||
|
@ -327,67 +282,8 @@ pub fn list_slice_assignment<'ctx, 'a>(
|
|||
let src_len = ctx.builder.build_int_truncate_or_bit_cast(src_len, int32, "srclen32");
|
||||
|
||||
// index in bound and positive should be done
|
||||
// assert if dest.step == 1 then len(src) <= len(dest) else len(src) == len(dest), and
|
||||
// TODO: assert if dest.step == 1 then len(src) <= len(dest) else len(src) == len(dest), and
|
||||
// throw exception if not satisfied
|
||||
let src_end = ctx.builder
|
||||
.build_select(
|
||||
ctx.builder.build_int_compare(
|
||||
inkwell::IntPredicate::SLT,
|
||||
src_idx.2,
|
||||
zero,
|
||||
"is_neg",
|
||||
),
|
||||
ctx.builder.build_int_sub(src_idx.1, one, "e_min_one"),
|
||||
ctx.builder.build_int_add(src_idx.1, one, "e_add_one"),
|
||||
"final_e",
|
||||
)
|
||||
.into_int_value();
|
||||
let dest_end = ctx.builder
|
||||
.build_select(
|
||||
ctx.builder.build_int_compare(
|
||||
inkwell::IntPredicate::SLT,
|
||||
dest_idx.2,
|
||||
zero,
|
||||
"is_neg",
|
||||
),
|
||||
ctx.builder.build_int_sub(dest_idx.1, one, "e_min_one"),
|
||||
ctx.builder.build_int_add(dest_idx.1, one, "e_add_one"),
|
||||
"final_e",
|
||||
)
|
||||
.into_int_value();
|
||||
let src_slice_len =
|
||||
calculate_len_for_slice_range(generator, ctx, src_idx.0, src_end, src_idx.2);
|
||||
let dest_slice_len =
|
||||
calculate_len_for_slice_range(generator, ctx, dest_idx.0, dest_end, dest_idx.2);
|
||||
let src_eq_dest = ctx.builder.build_int_compare(
|
||||
IntPredicate::EQ,
|
||||
src_slice_len,
|
||||
dest_slice_len,
|
||||
"slice_src_eq_dest",
|
||||
);
|
||||
let src_slt_dest = ctx.builder.build_int_compare(
|
||||
IntPredicate::SLT,
|
||||
src_slice_len,
|
||||
dest_slice_len,
|
||||
"slice_src_slt_dest",
|
||||
);
|
||||
let dest_step_eq_one = ctx.builder.build_int_compare(
|
||||
IntPredicate::EQ,
|
||||
dest_idx.2,
|
||||
dest_idx.2.get_type().const_int(1, false),
|
||||
"slice_dest_step_eq_one",
|
||||
);
|
||||
let cond_1 = ctx.builder.build_and(dest_step_eq_one, src_slt_dest, "slice_cond_1");
|
||||
let cond = ctx.builder.build_or(src_eq_dest, cond_1, "slice_cond");
|
||||
ctx.make_assert(
|
||||
generator,
|
||||
cond,
|
||||
"0:ValueError",
|
||||
"attempt to assign sequence of size {0} to slice of size {1} with step size {2}",
|
||||
[Some(src_slice_len), Some(dest_slice_len), Some(dest_idx.2)],
|
||||
ctx.current_loc,
|
||||
);
|
||||
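The slice-assignment length check above first turns each inclusive end index into the exclusive bound expected by `calculate_len_for_slice_range`, picking `end - 1` or `end + 1` with a `select` on the sign of the step. A sketch of that one step, using only the builder calls present in the hunk; the function name and parameters are illustrative:

```rust
use inkwell::{builder::Builder, values::IntValue, IntPredicate};

/// Sketch: convert an inclusive slice end into the exclusive bound used for
/// length calculation (end - 1 for a negative step, end + 1 otherwise).
fn exclusive_end<'ctx>(
    builder: &Builder<'ctx>,
    end: IntValue<'ctx>,
    step: IntValue<'ctx>,
    zero: IntValue<'ctx>,
    one: IntValue<'ctx>,
) -> IntValue<'ctx> {
    let is_neg = builder.build_int_compare(IntPredicate::SLT, step, zero, "is_neg");
    builder
        .build_select(
            is_neg,
            builder.build_int_sub(end, one, "e_min_one"),
            builder.build_int_add(end, one, "e_add_one"),
            "final_e",
        )
        .into_int_value()
}
```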
|
||||
let new_len = {
|
||||
let args = vec![
|
||||
dest_idx.0.into(), // dest start idx
|
||||
@ -17,13 +17,10 @@ use inkwell::{
|
|||
module::Module,
|
||||
passes::{PassManager, PassManagerBuilder},
|
||||
types::{AnyType, BasicType, BasicTypeEnum},
|
||||
values::{BasicValueEnum, FunctionValue, PhiValue, PointerValue},
|
||||
debug_info::{
|
||||
DebugInfoBuilder, DICompileUnit, DISubprogram, AsDIScope, DIFlagsConstants, DIScope
|
||||
},
|
||||
values::{BasicValueEnum, FunctionValue, PhiValue, PointerValue}
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use nac3parser::ast::{Stmt, StrRef, Location};
|
||||
use nac3parser::ast::{Stmt, StrRef};
|
||||
use parking_lot::{Condvar, Mutex};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::sync::{
|
||||
|
@ -31,7 +28,6 @@ use std::sync::{
|
|||
Arc,
|
||||
};
|
||||
use std::thread;
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
pub mod concrete_type;
|
||||
pub mod expr;
|
||||
|
@ -53,16 +49,9 @@ pub struct StaticValueStore {
|
|||
|
||||
pub type VarValue<'ctx> = (PointerValue<'ctx>, Option<Arc<dyn StaticValue + Send + Sync>>, i64);
|
||||
|
||||
lazy_static!(
|
||||
// HACK: The Mutex is a work-around for issue
|
||||
// https://git.m-labs.hk/M-Labs/nac3/issues/275
|
||||
static ref PASSES_INIT_LOCK: Mutex<AtomicBool> = Mutex::new(AtomicBool::new(true));
|
||||
);
|
||||
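The `PASSES_INIT_LOCK` introduced above serializes pass-manager initialization across the codegen worker threads, because populating the function pass manager is not thread-safe in this LLVM build (see the linked issue in the hunk). A sketch of the lock plus the critical section, mirroring the calls used in `worker_thread`; the wrapper function is illustrative:

```rust
use std::sync::atomic::AtomicBool;

use inkwell::{
    module::Module,
    passes::{PassManager, PassManagerBuilder},
    values::FunctionValue,
    OptimizationLevel,
};
use lazy_static::lazy_static;
use parking_lot::Mutex;

lazy_static! {
    // Shared by every codegen worker; only the critical section matters
    // (work-around for https://git.m-labs.hk/M-Labs/nac3/issues/275).
    static ref PASSES_INIT_LOCK: Mutex<AtomicBool> = Mutex::new(AtomicBool::new(true));
}

/// Sketch: build the per-module function pass manager while holding the
/// global lock, then run it freely afterwards.
fn init_function_passes<'ctx>(module: &Module<'ctx>) -> PassManager<FunctionValue<'ctx>> {
    let passes = PassManager::create(module);
    {
        let _guard = PASSES_INIT_LOCK.lock();
        let pass_builder = PassManagerBuilder::create();
        pass_builder.set_optimization_level(OptimizationLevel::Default);
        pass_builder.populate_function_pass_manager(&passes);
    }
    passes
}
```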
|
||||
pub struct CodeGenContext<'ctx, 'a> {
|
||||
pub ctx: &'ctx Context,
|
||||
pub builder: Builder<'ctx>,
|
||||
pub debug_info: (DebugInfoBuilder<'ctx>, DICompileUnit<'ctx>, DIScope<'ctx>),
|
||||
pub module: Module<'ctx>,
|
||||
pub top_level: &'a TopLevelContext,
|
||||
pub unifier: Unifier,
|
||||
|
@ -88,7 +77,6 @@ pub struct CodeGenContext<'ctx, 'a> {
|
|||
pub outer_catch_clauses:
|
||||
Option<(Vec<Option<BasicValueEnum<'ctx>>>, BasicBlock<'ctx>, PhiValue<'ctx>)>,
|
||||
pub need_sret: bool,
|
||||
pub current_loc: Location,
|
||||
}
|
||||
|
||||
impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
|
||||
|
@ -212,43 +200,25 @@ impl WorkerRegistry {
|
|||
fn worker_thread<G: CodeGenerator>(&self, generator: &mut G, f: Arc<WithCall>) {
|
||||
let context = Context::create();
|
||||
let mut builder = context.create_builder();
|
||||
let mut module = context.create_module(generator.get_name());
|
||||
|
||||
module.add_basic_value_flag(
|
||||
"Debug Info Version",
|
||||
inkwell::module::FlagBehavior::Warning,
|
||||
context.i32_type().const_int(3, false),
|
||||
);
|
||||
module.add_basic_value_flag(
|
||||
"Dwarf Version",
|
||||
inkwell::module::FlagBehavior::Warning,
|
||||
context.i32_type().const_int(4, false),
|
||||
);
|
||||
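The module-flag lines in the hunk above register the metadata LLVM needs before DWARF debug info may be attached to a module ("Debug Info Version" 3 and "Dwarf Version" 4). A minimal sketch of those two calls; the function name is illustrative and the flag values are taken verbatim from the hunk:

```rust
use inkwell::{context::Context, module::FlagBehavior};

/// Sketch: add the module flags required for debug-info emission.
fn add_debug_flags(context: &Context) {
    let module = context.create_module("example");
    module.add_basic_value_flag(
        "Debug Info Version",
        FlagBehavior::Warning,
        context.i32_type().const_int(3, false),
    );
    module.add_basic_value_flag(
        "Dwarf Version",
        FlagBehavior::Warning,
        context.i32_type().const_int(4, false),
    );
}
```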
let module = context.create_module(generator.get_name());
|
||||
|
||||
let pass_builder = PassManagerBuilder::create();
|
||||
pass_builder.set_optimization_level(OptimizationLevel::Default);
|
||||
let passes = PassManager::create(&module);
|
||||
|
||||
// HACK: This critical section is a work-around for issue
|
||||
// https://git.m-labs.hk/M-Labs/nac3/issues/275
|
||||
{
|
||||
let _data = PASSES_INIT_LOCK.lock();
|
||||
let pass_builder = PassManagerBuilder::create();
|
||||
pass_builder.set_optimization_level(OptimizationLevel::Default);
|
||||
pass_builder.populate_function_pass_manager(&passes);
|
||||
}
|
||||
pass_builder.populate_function_pass_manager(&passes);
|
||||
|
||||
let mut errors = HashSet::new();
|
||||
while let Some(task) = self.receiver.recv().unwrap() {
|
||||
match gen_func(&context, generator, self, builder, module, task) {
|
||||
let tmp_module = context.create_module("tmp");
|
||||
match gen_func(&context, generator, self, builder, tmp_module, task) {
|
||||
Ok(result) => {
|
||||
builder = result.0;
|
||||
passes.run_on(&result.2);
|
||||
module = result.1;
|
||||
module.link_in_module(result.1).unwrap();
|
||||
}
|
||||
Err((old_builder, e)) => {
|
||||
builder = old_builder;
|
||||
errors.insert(e);
|
||||
// create a new empty module just to continue codegen and collect errors
|
||||
module = context.create_module(&format!("{}_recover", generator.get_name()));
|
||||
}
|
||||
}
|
||||
*self.task_count.lock() -= 1;
|
||||
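One side of the worker-loop hunk above generates every task into a throw-away module and links it into the worker's accumulating module, so a single failing task does not poison the rest; the other side swaps the whole module on success and creates a fresh recovery module on error. A tiny sketch of just the linking step, with the surrounding task handling elided:

```rust
use inkwell::context::Context;

/// Sketch: link a per-task module into the worker's accumulating module,
/// as the loop above does after each successful `gen_func` call.
fn link_task_module(context: &Context) {
    let main_module = context.create_module("main");
    let task_module = context.create_module("tmp");
    // ... emit the task's function(s) into `task_module` here ...
    main_module.link_in_module(task_module).unwrap();
}
```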
|
@ -261,7 +231,7 @@ impl WorkerRegistry {
|
|||
let result = module.verify();
|
||||
if let Err(err) = result {
|
||||
println!("{}", module.print_to_string().to_str().unwrap());
|
||||
println!("{}", err.to_string());
|
||||
println!("{}", err);
|
||||
panic!()
|
||||
}
|
||||
f.run(&module);
|
||||
|
@ -285,7 +255,6 @@ pub struct CodeGenTask {
|
|||
|
||||
fn get_llvm_type<'ctx>(
|
||||
ctx: &'ctx Context,
|
||||
module: &Module<'ctx>,
|
||||
generator: &mut dyn CodeGenerator,
|
||||
unifier: &mut Unifier,
|
||||
top_level: &TopLevelContext,
|
||||
|
@ -310,7 +279,6 @@ fn get_llvm_type<'ctx>(
|
|||
) if *obj_id == *opt_id => {
|
||||
return get_llvm_type(
|
||||
ctx,
|
||||
module,
|
||||
generator,
|
||||
unifier,
|
||||
top_level,
|
||||
|
@ -327,37 +295,27 @@ fn get_llvm_type<'ctx>(
|
|||
// a struct with fields in the order of declaration
|
||||
let top_level_defs = top_level.definitions.read();
|
||||
let definition = top_level_defs.get(obj_id.0).unwrap();
|
||||
let ty = if let TopLevelDef::Class { fields: fields_list, .. } =
|
||||
let ty = if let TopLevelDef::Class { name, fields: fields_list, .. } =
|
||||
&*definition.read()
|
||||
{
|
||||
let name = unifier.stringify(ty);
|
||||
match module.get_struct_type(&name) {
|
||||
Some(t) => t.ptr_type(AddressSpace::Generic).into(),
|
||||
None => {
|
||||
let struct_type = ctx.opaque_struct_type(&name);
|
||||
type_cache.insert(
|
||||
unifier.get_representative(ty),
|
||||
struct_type.ptr_type(AddressSpace::Generic).into()
|
||||
);
|
||||
let fields = fields_list
|
||||
.iter()
|
||||
.map(|f| {
|
||||
get_llvm_type(
|
||||
ctx,
|
||||
module,
|
||||
generator,
|
||||
unifier,
|
||||
top_level,
|
||||
type_cache,
|
||||
primitives,
|
||||
fields[&f.0].0,
|
||||
)
|
||||
})
|
||||
.collect_vec();
|
||||
struct_type.set_body(&fields, false);
|
||||
struct_type.ptr_type(AddressSpace::Generic).into()
|
||||
}
|
||||
}
|
||||
let struct_type = ctx.opaque_struct_type(&name.to_string());
|
||||
type_cache.insert(unifier.get_representative(ty), struct_type.ptr_type(AddressSpace::Generic).into());
|
||||
let fields = fields_list
|
||||
.iter()
|
||||
.map(|f| {
|
||||
get_llvm_type(
|
||||
ctx,
|
||||
generator,
|
||||
unifier,
|
||||
top_level,
|
||||
type_cache,
|
||||
primitives,
|
||||
fields[&f.0].0,
|
||||
)
|
||||
})
|
||||
.collect_vec();
|
||||
struct_type.set_body(&fields, false);
|
||||
struct_type.ptr_type(AddressSpace::Generic).into()
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
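The `get_llvm_type` hunk above names each class's LLVM struct after a rendering of the type and, crucially, inserts the opaque struct's pointer type into the type cache before lowering the fields, so classes whose fields refer back to themselves terminate. A sketch of that lookup-or-create pattern; `name` stands for `unifier.stringify(ty)` and the field lowering is elided:

```rust
use inkwell::{context::Context, module::Module, types::BasicTypeEnum, AddressSpace};

/// Sketch: reuse an already-declared struct type, or declare it opaquely,
/// cache it, and only then fill in its body.
fn class_ptr_type<'ctx>(
    ctx: &'ctx Context,
    module: &Module<'ctx>,
    name: &str,
    field_types: &[BasicTypeEnum<'ctx>],
) -> BasicTypeEnum<'ctx> {
    match module.get_struct_type(name) {
        Some(t) => t.ptr_type(AddressSpace::Generic).into(),
        None => {
            let struct_type = ctx.opaque_struct_type(name);
            // The real code inserts struct_type.ptr_type(...) into the type
            // cache at this point, before recursing into the field types.
            struct_type.set_body(field_types, false);
            struct_type.ptr_type(AddressSpace::Generic).into()
        }
    }
}
```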
|
@ -367,19 +325,14 @@ fn get_llvm_type<'ctx>(
|
|||
// a struct with fields in the order present in the tuple
|
||||
let fields = ty
|
||||
.iter()
|
||||
.map(|ty| {
|
||||
get_llvm_type(
|
||||
ctx, module, generator, unifier, top_level, type_cache, primitives, *ty,
|
||||
)
|
||||
})
|
||||
.map(|ty| get_llvm_type(ctx, generator, unifier, top_level, type_cache, primitives, *ty))
|
||||
.collect_vec();
|
||||
ctx.struct_type(&fields, false).into()
|
||||
}
|
||||
TList { ty } => {
|
||||
// a struct with an integer and a pointer to an array
|
||||
let element_type = get_llvm_type(
|
||||
ctx, module, generator, unifier, top_level, type_cache, primitives, *ty,
|
||||
);
|
||||
let element_type =
|
||||
get_llvm_type(ctx, generator, unifier, top_level, type_cache, primitives, *ty);
|
||||
let fields = [
|
||||
element_type.ptr_type(AddressSpace::Generic).into(),
|
||||
generator.get_size_type(ctx).into(),
|
||||
|
@ -465,40 +418,28 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
|
|||
(primitives.float, context.f64_type().into()),
|
||||
(primitives.bool, context.bool_type().into()),
|
||||
(primitives.str, {
|
||||
let name = "str";
|
||||
match module.get_struct_type(name) {
|
||||
None => {
|
||||
let str_type = context.opaque_struct_type("str");
|
||||
let fields = [
|
||||
context.i8_type().ptr_type(AddressSpace::Generic).into(),
|
||||
generator.get_size_type(context).into(),
|
||||
];
|
||||
str_type.set_body(&fields, false);
|
||||
str_type.into()
|
||||
}
|
||||
Some(t) => t.as_basic_type_enum()
|
||||
}
|
||||
let str_type = context.opaque_struct_type("str");
|
||||
let fields = [
|
||||
context.i8_type().ptr_type(AddressSpace::Generic).into(),
|
||||
generator.get_size_type(context).into(),
|
||||
];
|
||||
str_type.set_body(&fields, false);
|
||||
str_type.into()
|
||||
}),
|
||||
(primitives.range, context.i32_type().array_type(3).ptr_type(AddressSpace::Generic).into()),
|
||||
(primitives.exception, {
|
||||
let name = "Exception";
|
||||
match module.get_struct_type(name) {
|
||||
Some(t) => t.ptr_type(AddressSpace::Generic).as_basic_type_enum(),
|
||||
None => {
|
||||
let exception = context.opaque_struct_type("Exception");
|
||||
let int32 = context.i32_type().into();
|
||||
let int64 = context.i64_type().into();
|
||||
let str_ty = module.get_struct_type("str").unwrap().as_basic_type_enum();
|
||||
let fields = [int32, str_ty, int32, int32, str_ty, str_ty, int64, int64, int64];
|
||||
exception.set_body(&fields, false);
|
||||
exception.ptr_type(AddressSpace::Generic).as_basic_type_enum()
|
||||
}
|
||||
}
|
||||
})
|
||||
]
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect();
|
||||
type_cache.insert(primitives.exception, {
|
||||
let exception = context.opaque_struct_type("Exception");
|
||||
let int32 = context.i32_type().into();
|
||||
let int64 = context.i64_type().into();
|
||||
let str_ty = *type_cache.get(&primitives.str).unwrap();
|
||||
let fields = [int32, str_ty, int32, int32, str_ty, str_ty, int64, int64, int64];
|
||||
exception.set_body(&fields, false);
|
||||
exception.ptr_type(AddressSpace::Generic).into()
|
||||
});
|
||||
// NOTE: special handling of option cannot use this type cache since it contains type var,
|
||||
// handled inside get_llvm_type instead
|
||||
|
||||
|
@ -521,7 +462,7 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
|
|||
let ret_type = if unifier.unioned(ret, primitives.none) {
|
||||
None
|
||||
} else {
|
||||
Some(get_llvm_type(context, &module, generator, &mut unifier, top_level_ctx.as_ref(), &mut type_cache, &primitives, ret))
|
||||
Some(get_llvm_type(context, generator, &mut unifier, top_level_ctx.as_ref(), &mut type_cache, &primitives, ret))
|
||||
};
|
||||
|
||||
let has_sret = ret_type.map_or(false, |ty| need_sret(context, ty));
|
||||
|
@ -530,7 +471,6 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
|
|||
.map(|arg| {
|
||||
get_llvm_type(
|
||||
context,
|
||||
&module,
|
||||
generator,
|
||||
&mut unifier,
|
||||
top_level_ctx.as_ref(),
|
||||
|
@ -579,7 +519,6 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
|
|||
let alloca = builder.build_alloca(
|
||||
get_llvm_type(
|
||||
context,
|
||||
&module,
|
||||
generator,
|
||||
&mut unifier,
|
||||
top_level_ctx.as_ref(),
|
||||
|
@ -611,58 +550,6 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
|
|||
builder.build_unconditional_branch(body_bb);
|
||||
builder.position_at_end(body_bb);
|
||||
|
||||
let (dibuilder, compile_unit) = module.create_debug_info_builder(
|
||||
/* allow_unresolved */ true,
|
||||
/* language */ inkwell::debug_info::DWARFSourceLanguage::Python,
|
||||
/* filename */
|
||||
&task
|
||||
.body
|
||||
.get(0)
|
||||
.map_or_else(
|
||||
|| "<nac3_internal>".to_string(),
|
||||
|f| f.location.file.0.to_string(),
|
||||
),
|
||||
/* directory */ "",
|
||||
/* producer */ "NAC3",
|
||||
/* is_optimized */ true,
|
||||
/* compiler command line flags */ "",
|
||||
/* runtime_ver */ 0,
|
||||
/* split_name */ "",
|
||||
/* kind */ inkwell::debug_info::DWARFEmissionKind::Full,
|
||||
/* dwo_id */ 0,
|
||||
/* split_debug_inling */ true,
|
||||
/* debug_info_for_profiling */ false,
|
||||
/* sysroot */ "",
|
||||
/* sdk */ "",
|
||||
);
|
||||
let subroutine_type = dibuilder.create_subroutine_type(
|
||||
compile_unit.get_file(),
|
||||
Some(
|
||||
dibuilder
|
||||
.create_basic_type("_", 0_u64, 0x00, inkwell::debug_info::DIFlags::PUBLIC)
|
||||
.unwrap()
|
||||
.as_type(),
|
||||
),
|
||||
&[],
|
||||
inkwell::debug_info::DIFlags::PUBLIC,
|
||||
);
|
||||
let (row, col) =
|
||||
task.body.get(0).map_or_else(|| (0, 0), |b| (b.location.row, b.location.column));
|
||||
let func_scope: DISubprogram<'_> = dibuilder.create_function(
|
||||
/* scope */ compile_unit.as_debug_info_scope(),
|
||||
/* func name */ symbol,
|
||||
/* linkage_name */ None,
|
||||
/* file */ compile_unit.get_file(),
|
||||
/* line_no */ row as u32,
|
||||
/* DIType */ subroutine_type,
|
||||
/* is_local_to_unit */ false,
|
||||
/* is_definition */ true,
|
||||
/* scope_line */ row as u32,
|
||||
/* flags */ inkwell::debug_info::DIFlags::PUBLIC,
|
||||
/* is_optimized */ true,
|
||||
);
|
||||
fn_val.set_subprogram(func_scope);
|
||||
|
||||
let mut code_gen_context = CodeGenContext {
|
||||
ctx: context,
|
||||
resolver: task.resolver,
|
||||
|
@ -683,20 +570,9 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
|
|||
module,
|
||||
unifier,
|
||||
static_value_store,
|
||||
need_sret: has_sret,
|
||||
current_loc: Default::default(),
|
||||
debug_info: (dibuilder, compile_unit, func_scope.as_debug_info_scope()),
|
||||
need_sret: has_sret
|
||||
};
|
||||
|
||||
let loc = code_gen_context.debug_info.0.create_debug_location(
|
||||
context,
|
||||
row as u32,
|
||||
col as u32,
|
||||
func_scope.as_debug_info_scope(),
|
||||
None
|
||||
);
|
||||
code_gen_context.builder.set_current_debug_location(context, loc);
|
||||
|
||||
let result = codegen_function(generator, &mut code_gen_context);
|
||||
|
||||
// after static analysis, only void functions can have no return at the end.
|
||||
|
@ -704,9 +580,6 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
|
|||
code_gen_context.builder.build_return(None);
|
||||
}
|
||||
|
||||
code_gen_context.builder.unset_current_debug_location();
|
||||
code_gen_context.debug_info.0.finalize();
|
||||
|
||||
let CodeGenContext { builder, module, .. } = code_gen_context;
|
||||
if let Err(e) = result {
|
||||
return Err((builder, e));
|
||||
@ -42,26 +42,19 @@ pub fn gen_store_target<'ctx, 'a, G: CodeGenerator>(
|
|||
// very similar to gen_expr, but we don't do an extra load at the end
|
||||
// and we flatten nested tuples
|
||||
Ok(match &pattern.node {
|
||||
ExprKind::Name { id, .. } => match ctx.var_assignment.get(id) {
|
||||
None => {
|
||||
let ptr_ty = ctx.get_llvm_type(generator, pattern.custom.unwrap());
|
||||
let ptr = generator.gen_var_alloc(ctx, ptr_ty)?;
|
||||
ctx.var_assignment.insert(*id, (ptr, None, 0));
|
||||
ptr
|
||||
}
|
||||
Some(v) => {
|
||||
let (ptr, counter) = (v.0, v.2);
|
||||
ctx.var_assignment.insert(*id, (ptr, None, counter));
|
||||
ptr
|
||||
}
|
||||
ExprKind::Name { id, .. } => {
|
||||
ctx.var_assignment.get(id).map(|v| Ok(v.0) as Result<_, String>).unwrap_or_else(
|
||||
|| {
|
||||
let ptr_ty = ctx.get_llvm_type(generator, pattern.custom.unwrap());
|
||||
let ptr = generator.gen_var_alloc(ctx, ptr_ty)?;
|
||||
ctx.var_assignment.insert(*id, (ptr, None, 0));
|
||||
Ok(ptr)
|
||||
},
|
||||
)?
|
||||
}
|
||||
ExprKind::Attribute { value, attr, .. } => {
|
||||
let index = ctx.get_attr_index(value.custom.unwrap(), *attr);
|
||||
let val = generator.gen_expr(ctx, value)?.unwrap().to_basic_value_enum(
|
||||
ctx,
|
||||
generator,
|
||||
value.custom.unwrap(),
|
||||
)?;
|
||||
let val = generator.gen_expr(ctx, value)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
let ptr = if let BasicValueEnum::PointerValue(v) = val {
|
||||
v
|
||||
} else {
|
||||
|
@ -79,58 +72,17 @@ pub fn gen_store_target<'ctx, 'a, G: CodeGenerator>(
|
|||
}
|
||||
}
|
||||
ExprKind::Subscript { value, slice, .. } => {
|
||||
assert!(matches!(
|
||||
ctx.unifier.get_ty_immutable(value.custom.unwrap()).as_ref(),
|
||||
TypeEnum::TList { .. },
|
||||
));
|
||||
let i32_type = ctx.ctx.i32_type();
|
||||
let zero = i32_type.const_zero();
|
||||
let v = generator
|
||||
.gen_expr(ctx, value)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, value.custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_pointer_value();
|
||||
let len = ctx
|
||||
.build_gep_and_load(v, &[zero, i32_type.const_int(1, false)])
|
||||
.into_int_value();
|
||||
let raw_index = generator
|
||||
let index = generator
|
||||
.gen_expr(ctx, slice)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, slice.custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_int_value();
|
||||
let raw_index = ctx.builder.build_int_s_extend(
|
||||
raw_index,
|
||||
generator.get_size_type(ctx.ctx),
|
||||
"sext",
|
||||
);
|
||||
// handle negative index
|
||||
let is_negative = ctx.builder.build_int_compare(
|
||||
inkwell::IntPredicate::SLT,
|
||||
raw_index,
|
||||
generator.get_size_type(ctx.ctx).const_zero(),
|
||||
"is_neg",
|
||||
);
|
||||
let adjusted = ctx.builder.build_int_add(raw_index, len, "adjusted");
|
||||
let index = ctx
|
||||
.builder
|
||||
.build_select(is_negative, adjusted, raw_index, "index")
|
||||
.into_int_value();
|
||||
// unsigned less than is enough, because negative index after adjustment is
|
||||
// bigger than the length (for unsigned cmp)
|
||||
let bound_check = ctx.builder.build_int_compare(
|
||||
inkwell::IntPredicate::ULT,
|
||||
index,
|
||||
len,
|
||||
"inbound",
|
||||
);
|
||||
ctx.make_assert(
|
||||
generator,
|
||||
bound_check,
|
||||
"0:IndexError",
|
||||
"index {0} out of bounds 0:{1}",
|
||||
[Some(raw_index), Some(len), None],
|
||||
slice.location,
|
||||
);
|
||||
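The subscript-store hunk above adds Python-style negative-index handling: the index is sign-extended, conditionally shifted by the list length, and then checked with a single unsigned comparison (an out-of-range negative index wraps to a value at or above the length, so `ULT` covers both directions). A sketch of that adjustment against plain builder calls; the function wrapper is illustrative:

```rust
use inkwell::{builder::Builder, values::IntValue, IntPredicate};

/// Sketch: adjust a possibly negative index by the list length and produce
/// the in-bounds flag used for the IndexError assertion.
fn adjust_index<'ctx>(
    builder: &Builder<'ctx>,
    raw_index: IntValue<'ctx>,
    len: IntValue<'ctx>,
) -> (IntValue<'ctx>, IntValue<'ctx>) {
    let zero = raw_index.get_type().const_zero();
    let is_negative = builder.build_int_compare(IntPredicate::SLT, raw_index, zero, "is_neg");
    let adjusted = builder.build_int_add(raw_index, len, "adjusted");
    let index = builder
        .build_select(is_negative, adjusted, raw_index, "index")
        .into_int_value();
    let in_bounds = builder.build_int_compare(IntPredicate::ULT, index, len, "inbound");
    (index, in_bounds)
}
```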
unsafe {
|
||||
let arr_ptr = ctx
|
||||
.build_gep_and_load(v, &[i32_type.const_zero(), i32_type.const_zero()])
|
||||
|
@ -150,9 +102,7 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
|
|||
) -> Result<(), String> {
|
||||
match &target.node {
|
||||
ExprKind::Tuple { elts, .. } => {
|
||||
if let BasicValueEnum::StructValue(v) =
|
||||
value.to_basic_value_enum(ctx, generator, target.custom.unwrap())?
|
||||
{
|
||||
if let BasicValueEnum::StructValue(v) = value.to_basic_value_enum(ctx, generator)? {
|
||||
for (i, elt) in elts.iter().enumerate() {
|
||||
let v = ctx
|
||||
.builder
|
||||
|
@ -171,13 +121,11 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
|
|||
let ls = generator
|
||||
.gen_expr(ctx, ls)?
|
||||
.unwrap()
|
||||
.to_basic_value_enum(ctx, generator, ls.custom.unwrap())?
|
||||
.to_basic_value_enum(ctx, generator)?
|
||||
.into_pointer_value();
|
||||
let (start, end, step) =
|
||||
handle_slice_indices(lower, upper, step, ctx, generator, ls)?;
|
||||
let value = value
|
||||
.to_basic_value_enum(ctx, generator, target.custom.unwrap())?
|
||||
.into_pointer_value();
|
||||
let value = value.to_basic_value_enum(ctx, generator)?.into_pointer_value();
|
||||
let ty =
|
||||
if let TypeEnum::TList { ty } = &*ctx.unifier.get_ty(target.custom.unwrap()) {
|
||||
ctx.get_llvm_type(generator, *ty)
|
||||
|
@ -185,7 +133,15 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
|
|||
unreachable!()
|
||||
};
|
||||
let src_ind = handle_slice_indices(&None, &None, &None, ctx, generator, value)?;
|
||||
list_slice_assignment(generator, ctx, ty, ls, (start, end, step), value, src_ind)
|
||||
list_slice_assignment(
|
||||
ctx,
|
||||
generator.get_size_type(ctx.ctx),
|
||||
ty,
|
||||
ls,
|
||||
(start, end, step),
|
||||
value,
|
||||
src_ind,
|
||||
)
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
|
@ -199,7 +155,7 @@ pub fn gen_assign<'ctx, 'a, G: CodeGenerator>(
|
|||
*static_value = Some(s.clone());
|
||||
}
|
||||
}
|
||||
let val = value.to_basic_value_enum(ctx, generator, target.custom.unwrap())?;
|
||||
let val = value.to_basic_value_enum(ctx, generator)?;
|
||||
ctx.builder.build_store(ptr, val);
|
||||
}
|
||||
};
|
||||
|
@ -229,11 +185,7 @@ pub fn gen_for<'ctx, 'a, G: CodeGenerator>(
|
|||
// store loop bb information and restore it later
|
||||
let loop_bb = ctx.loop_target.replace((test_bb, cont_bb));
|
||||
|
||||
let iter_val = generator.gen_expr(ctx, iter)?.unwrap().to_basic_value_enum(
|
||||
ctx,
|
||||
generator,
|
||||
iter.custom.unwrap(),
|
||||
)?;
|
||||
let iter_val = generator.gen_expr(ctx, iter)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
if ctx.unifier.unioned(iter.custom.unwrap(), ctx.primitives.range) {
|
||||
// setup
|
||||
let iter_val = iter_val.into_pointer_value();
|
||||
|
@ -346,11 +298,7 @@ pub fn gen_while<'ctx, 'a, G: CodeGenerator>(
|
|||
let loop_bb = ctx.loop_target.replace((test_bb, cont_bb));
|
||||
ctx.builder.build_unconditional_branch(test_bb);
|
||||
ctx.builder.position_at_end(test_bb);
|
||||
let test = generator.gen_expr(ctx, test)?.unwrap().to_basic_value_enum(
|
||||
ctx,
|
||||
generator,
|
||||
test.custom.unwrap(),
|
||||
)?;
|
||||
let test = generator.gen_expr(ctx, test)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
if let BasicValueEnum::IntValue(test) = test {
|
||||
ctx.builder.build_conditional_branch(test, body_bb, orelse_bb);
|
||||
} else {
|
||||
|
@ -411,11 +359,7 @@ pub fn gen_if<'ctx, 'a, G: CodeGenerator>(
|
|||
};
|
||||
ctx.builder.build_unconditional_branch(test_bb);
|
||||
ctx.builder.position_at_end(test_bb);
|
||||
let test = generator.gen_expr(ctx, test)?.unwrap().to_basic_value_enum(
|
||||
ctx,
|
||||
generator,
|
||||
test.custom.unwrap(),
|
||||
)?;
|
||||
let test = generator.gen_expr(ctx, test)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
if let BasicValueEnum::IntValue(test) = test {
|
||||
ctx.builder.build_conditional_branch(test, body_bb, orelse_bb);
|
||||
} else {
|
||||
|
@ -478,8 +422,8 @@ pub fn final_proxy<'ctx, 'a>(
|
|||
final_paths.push(block);
|
||||
}
|
||||
|
||||
pub fn get_builtins<'ctx, 'a>(
|
||||
generator: &mut dyn CodeGenerator,
|
||||
pub fn get_builtins<'ctx, 'a, G: CodeGenerator>(
|
||||
generator: &mut G,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
symbol: &str,
|
||||
) -> FunctionValue<'ctx> {
|
||||
|
@ -497,7 +441,7 @@ pub fn get_builtins<'ctx, 'a>(
|
|||
if symbol == "__nac3_raise" || symbol == "__nac3_resume" {
|
||||
fun.add_attribute(
|
||||
AttributeLoc::Function,
|
||||
ctx.ctx.create_enum_attribute(Attribute::get_named_enum_kind_id("noreturn"), 0),
|
||||
ctx.ctx.create_enum_attribute(Attribute::get_named_enum_kind_id("noreturn"), 1),
|
||||
);
|
||||
}
|
||||
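The `get_builtins` hunk above attaches the `noreturn` enum attribute to the runtime raise/resume helpers; the two sides of the diff only differ in the attribute's integer value (0 vs 1). A sketch of attaching such an attribute, mirroring the inkwell calls in the hunk and using 0 as the value, which is the conventional choice for flag-like enum attributes (an assumption, not confirmed by the diff):

```rust
use inkwell::{
    attributes::{Attribute, AttributeLoc},
    context::Context,
    values::FunctionValue,
};

/// Sketch: mark a declared runtime helper as noreturn.
fn mark_noreturn<'ctx>(ctx: &'ctx Context, fun: FunctionValue<'ctx>) {
    let kind = Attribute::get_named_enum_kind_id("noreturn");
    fun.add_attribute(AttributeLoc::Function, ctx.create_enum_attribute(kind, 0));
}
```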
fun
|
||||
|
@ -512,7 +456,7 @@ pub fn exn_constructor<'ctx, 'a>(
|
|||
generator: &mut dyn CodeGenerator,
|
||||
) -> Result<Option<BasicValueEnum<'ctx>>, String> {
|
||||
let (zelf_ty, zelf) = obj.unwrap();
|
||||
let zelf = zelf.to_basic_value_enum(ctx, generator, zelf_ty)?.into_pointer_value();
|
||||
let zelf = zelf.to_basic_value_enum(ctx, generator)?.into_pointer_value();
|
||||
let int32 = ctx.ctx.i32_type();
|
||||
let zero = int32.const_zero();
|
||||
let zelf_id = {
|
||||
|
@ -535,14 +479,14 @@ pub fn exn_constructor<'ctx, 'a>(
|
|||
let ptr =
|
||||
ctx.builder.build_in_bounds_gep(zelf, &[zero, int32.const_int(5, false)], "exn.msg");
|
||||
let msg = if !args.is_empty() {
|
||||
args.remove(0).1.to_basic_value_enum(ctx, generator, ctx.primitives.str)?
|
||||
args.remove(0).1.to_basic_value_enum(ctx, generator)?
|
||||
} else {
|
||||
empty_string
|
||||
};
|
||||
ctx.builder.build_store(ptr, msg);
|
||||
for i in [6, 7, 8].iter() {
|
||||
let value = if !args.is_empty() {
|
||||
args.remove(0).1.to_basic_value_enum(ctx, generator, ctx.primitives.int64)?
|
||||
args.remove(0).1.to_basic_value_enum(ctx, generator)?
|
||||
} else {
|
||||
ctx.ctx.i64_type().const_zero().into()
|
||||
};
|
||||
|
@ -575,8 +519,8 @@ pub fn exn_constructor<'ctx, 'a>(
|
|||
Ok(Some(zelf.into()))
|
||||
}
|
||||
|
||||
pub fn gen_raise<'ctx, 'a>(
|
||||
generator: &mut dyn CodeGenerator,
|
||||
pub fn gen_raise<'ctx, 'a, G: CodeGenerator>(
|
||||
generator: &mut G,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
exception: Option<&BasicValueEnum<'ctx>>,
|
||||
loc: Location,
|
||||
|
@ -964,11 +908,7 @@ pub fn gen_return<'ctx, 'a, G: CodeGenerator>(
|
|||
) -> Result<(), String> {
|
||||
let value = value
|
||||
.as_ref()
|
||||
.map(|v_expr| {
|
||||
generator.gen_expr(ctx, v_expr).and_then(|v| {
|
||||
v.unwrap().to_basic_value_enum(ctx, generator, v_expr.custom.unwrap())
|
||||
})
|
||||
})
|
||||
.map(|v| generator.gen_expr(ctx, v).and_then(|v| v.unwrap().to_basic_value_enum(ctx, generator)))
|
||||
.transpose()?;
|
||||
if let Some(return_target) = ctx.return_target {
|
||||
if let Some(value) = value {
|
||||
|
@ -991,17 +931,6 @@ pub fn gen_stmt<'ctx, 'a, G: CodeGenerator>(
|
|||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
stmt: &Stmt<Option<Type>>,
|
||||
) -> Result<(), String> {
|
||||
ctx.current_loc = stmt.location;
|
||||
|
||||
let loc = ctx.debug_info.0.create_debug_location(
|
||||
ctx.ctx,
|
||||
ctx.current_loc.row as u32,
|
||||
ctx.current_loc.column as u32,
|
||||
ctx.debug_info.2,
|
||||
None,
|
||||
);
|
||||
ctx.builder.set_current_debug_location(ctx.ctx, loc);
|
||||
|
||||
match &stmt.node {
|
||||
StmtKind::Pass { .. } => {}
|
||||
StmtKind::Expr { value, .. } => {
|
||||
|
@ -1033,34 +962,26 @@ pub fn gen_stmt<'ctx, 'a, G: CodeGenerator>(
|
|||
StmtKind::For { .. } => generator.gen_for(ctx, stmt)?,
|
||||
StmtKind::With { .. } => generator.gen_with(ctx, stmt)?,
|
||||
StmtKind::AugAssign { target, op, value, .. } => {
|
||||
let value = gen_binop_expr(generator, ctx, target, op, value, stmt.location, true)?;
|
||||
generator.gen_assign(ctx, target, value.unwrap())?;
|
||||
let value = gen_binop_expr(generator, ctx, target, op, value)?;
|
||||
generator.gen_assign(ctx, target, value)?;
|
||||
}
|
||||
StmtKind::Try { .. } => gen_try(generator, ctx, stmt)?,
|
||||
StmtKind::Raise { exc, .. } => {
|
||||
if let Some(exc) = exc {
|
||||
let exc = generator.gen_expr(ctx, exc)?.unwrap().to_basic_value_enum(
|
||||
ctx,
|
||||
generator,
|
||||
exc.custom.unwrap(),
|
||||
)?;
|
||||
let exc =
|
||||
generator.gen_expr(ctx, exc)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
gen_raise(generator, ctx, Some(&exc), stmt.location);
|
||||
} else {
|
||||
gen_raise(generator, ctx, None, stmt.location);
|
||||
}
|
||||
}
|
||||
StmtKind::Assert { test, msg, .. } => {
|
||||
let test = generator.gen_expr(ctx, test)?.unwrap().to_basic_value_enum(
|
||||
ctx,
|
||||
generator,
|
||||
test.custom.unwrap(),
|
||||
)?;
|
||||
let test =
|
||||
generator.gen_expr(ctx, test)?.unwrap().to_basic_value_enum(ctx, generator)?;
|
||||
let err_msg = match msg {
|
||||
Some(msg) => generator.gen_expr(ctx, msg)?.unwrap().to_basic_value_enum(
|
||||
ctx,
|
||||
generator,
|
||||
msg.custom.unwrap(),
|
||||
)?,
|
||||
Some(msg) => {
|
||||
generator.gen_expr(ctx, msg)?.unwrap().to_basic_value_enum(ctx, generator)?
|
||||
}
|
||||
None => ctx.gen_string(generator, ""),
|
||||
};
|
||||
ctx.make_assert_impl(
|
||||
@ -181,42 +181,23 @@ fn test_primitives() {
|
|||
; ModuleID = 'test'
|
||||
source_filename = \"test\"
|
||||
|
||||
define i32 @testing(i32 %0, i32 %1) !dbg !4 {
|
||||
define i32 @testing(i32 %0, i32 %1) {
|
||||
init:
|
||||
%add = add i32 %0, %1, !dbg !9
|
||||
%cmp = icmp eq i32 %add, 1, !dbg !10
|
||||
br i1 %cmp, label %then, label %else, !dbg !10
|
||||
%add = add i32 %0, %1
|
||||
%cmp = icmp eq i32 %add, 1
|
||||
br i1 %cmp, label %then, label %else
|
||||
|
||||
then: ; preds = %init
|
||||
br label %cont, !dbg !11
|
||||
br label %cont
|
||||
|
||||
else: ; preds = %init
|
||||
br label %cont, !dbg !12
|
||||
br label %cont
|
||||
|
||||
cont: ; preds = %else, %then
|
||||
%if_exp_result.0 = phi i32 [ %0, %then ], [ 0, %else ], !dbg !13
|
||||
ret i32 %if_exp_result.0, !dbg !14
|
||||
%if_exp_result.0 = phi i32 [ %0, %then ], [ 0, %else ]
|
||||
ret i32 %if_exp_result.0
|
||||
}
|
||||
|
||||
!llvm.module.flags = !{!0, !1}
|
||||
!llvm.dbg.cu = !{!2}
|
||||
|
||||
!0 = !{i32 2, !\"Debug Info Version\", i32 3}
|
||||
!1 = !{i32 2, !\"Dwarf Version\", i32 4}
|
||||
!2 = distinct !DICompileUnit(language: DW_LANG_Python, file: !3, producer: \"NAC3\", isOptimized: true, runtimeVersion: 0, emissionKind: FullDebug)
|
||||
!3 = !DIFile(filename: \"unknown\", directory: \"\")
|
||||
!4 = distinct !DISubprogram(name: \"testing\", linkageName: \"testing\", scope: null, file: !3, line: 1, type: !5, scopeLine: 1, flags: DIFlagPublic, spFlags: DISPFlagDefinition | DISPFlagOptimized, unit: !2, retainedNodes: !8)
|
||||
!5 = !DISubroutineType(flags: DIFlagPublic, types: !6)
|
||||
!6 = !{!7}
|
||||
!7 = !DIBasicType(name: \"_\", flags: DIFlagPublic)
|
||||
!8 = !{}
|
||||
!9 = !DILocation(line: 1, column: 9, scope: !4)
|
||||
!10 = !DILocation(line: 2, column: 15, scope: !4)
|
||||
!11 = !DILocation(line: 2, column: 5, scope: !4)
|
||||
!12 = !DILocation(line: 2, column: 22, scope: !4)
|
||||
!13 = !DILocation(line: 0, scope: !4)
|
||||
!14 = !DILocation(line: 3, column: 8, scope: !4)
|
||||
"}
|
||||
"}
|
||||
.trim();
|
||||
assert_eq!(expected, module.print_to_string().to_str().unwrap().trim());
|
||||
})));
|
||||
|
@ -361,37 +342,19 @@ fn test_simple_call() {
|
|||
; ModuleID = 'test'
|
||||
source_filename = \"test\"
|
||||
|
||||
define i32 @testing(i32 %0) !dbg !5 {
|
||||
define i32 @testing(i32 %0) {
|
||||
init:
|
||||
%call = call i32 @foo.0(i32 %0), !dbg !10
|
||||
%mul = mul i32 %call, 2, !dbg !11
|
||||
ret i32 %mul, !dbg !11
|
||||
%call = call i32 @foo.0(i32 %0)
|
||||
%mul = mul i32 %call, 2
|
||||
ret i32 %mul
|
||||
}
|
||||
|
||||
define i32 @foo.0(i32 %0) !dbg !12 {
|
||||
|
||||
define i32 @foo.0(i32 %0) {
|
||||
init:
|
||||
%add = add i32 %0, 1, !dbg !13
|
||||
ret i32 %add, !dbg !13
|
||||
%add = add i32 %0, 1
|
||||
ret i32 %add
|
||||
}
|
||||
|
||||
!llvm.module.flags = !{!0, !1}
|
||||
!llvm.dbg.cu = !{!2, !4}
|
||||
|
||||
!0 = !{i32 2, !\"Debug Info Version\", i32 3}
|
||||
!1 = !{i32 2, !\"Dwarf Version\", i32 4}
|
||||
!2 = distinct !DICompileUnit(language: DW_LANG_Python, file: !3, producer: \"NAC3\", isOptimized: true, runtimeVersion: 0, emissionKind: FullDebug)
|
||||
!3 = !DIFile(filename: \"unknown\", directory: \"\")
|
||||
!4 = distinct !DICompileUnit(language: DW_LANG_Python, file: !3, producer: \"NAC3\", isOptimized: true, runtimeVersion: 0, emissionKind: FullDebug)
|
||||
!5 = distinct !DISubprogram(name: \"testing\", linkageName: \"testing\", scope: null, file: !3, line: 1, type: !6, scopeLine: 1, flags: DIFlagPublic, spFlags: DISPFlagDefinition | DISPFlagOptimized, unit: !2, retainedNodes: !9)
|
||||
!6 = !DISubroutineType(flags: DIFlagPublic, types: !7)
|
||||
!7 = !{!8}
|
||||
!8 = !DIBasicType(name: \"_\", flags: DIFlagPublic)
|
||||
!9 = !{}
|
||||
!10 = !DILocation(line: 1, column: 9, scope: !5)
|
||||
!11 = !DILocation(line: 2, column: 12, scope: !5)
|
||||
!12 = distinct !DISubprogram(name: \"foo.0\", linkageName: \"foo.0\", scope: null, file: !3, line: 1, type: !6, scopeLine: 1, flags: DIFlagPublic, spFlags: DISPFlagDefinition | DISPFlagOptimized, unit: !4, retainedNodes: !9)
|
||||
!13 = !DILocation(line: 1, column: 12, scope: !12)
|
||||
"}
|
||||
"}
|
||||
.trim();
|
||||
assert_eq!(expected, module.print_to_string().to_str().unwrap().trim());
|
||||
})));
|
||||
@ -71,7 +71,6 @@ pub trait StaticValue {
|
|||
&self,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
generator: &mut dyn CodeGenerator,
|
||||
expected_ty: Type,
|
||||
) -> Result<BasicValueEnum<'ctx>, String>;
|
||||
|
||||
fn get_field<'ctx, 'a>(
|
||||
|
@ -79,8 +78,6 @@ pub trait StaticValue {
|
|||
name: StrRef,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
) -> Option<ValueEnum<'ctx>>;
|
||||
|
||||
fn get_tuple_element<'ctx>(&self, index: u32) -> Option<ValueEnum<'ctx>>;
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -124,10 +121,9 @@ impl<'ctx> ValueEnum<'ctx> {
|
|||
self,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
generator: &mut dyn CodeGenerator,
|
||||
expected_ty: Type,
|
||||
) -> Result<BasicValueEnum<'ctx>, String> {
|
||||
match self {
|
||||
ValueEnum::Static(v) => v.to_basic_value_enum(ctx, generator, expected_ty),
|
||||
ValueEnum::Static(v) => v.to_basic_value_enum(ctx, generator),
|
||||
ValueEnum::Dynamic(v) => Ok(v),
|
||||
}
|
||||
}
|
||||
|
@ -367,7 +363,7 @@ impl dyn SymbolResolver + Send + Sync {
|
|||
unreachable!("expected class definition")
|
||||
}
|
||||
},
|
||||
&mut |id| format!("typevar{}", id),
|
||||
&mut |id| format!("var{}", id),
|
||||
&mut None,
|
||||
)
|
||||
}
|
||||
|
@ -66,7 +66,6 @@ pub fn get_exn_constructor(
|
|||
object_id: DefinitionId(class_id),
|
||||
type_vars: Default::default(),
|
||||
fields: exception_fields,
|
||||
static_fields: Default::default(),
|
||||
methods: vec![("__init__".into(), signature, DefinitionId(cons_id))],
|
||||
ancestors: vec![
|
||||
TypeAnnotation::CustomClass { id: DefinitionId(class_id), params: Default::default() },
|
||||
|
@ -176,7 +175,6 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
type_vars: Default::default(),
|
||||
fields: exception_fields,
|
||||
methods: Default::default(),
|
||||
static_fields: Default::default(),
|
||||
ancestors: vec![],
|
||||
constructor: None,
|
||||
resolver: None,
|
||||
|
@ -202,7 +200,6 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
object_id: DefinitionId(10),
|
||||
type_vars: vec![option_ty_var],
|
||||
fields: vec![],
|
||||
static_fields: vec![],
|
||||
methods: vec![
|
||||
("is_some".into(), is_some_ty.0, DefinitionId(11)),
|
||||
("is_none".into(), is_some_ty.0, DefinitionId(12)),
|
||||
|
@ -227,12 +224,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, obj, _, _, generator| {
|
||||
let expect_ty = obj.clone().unwrap().0;
|
||||
let obj_val = obj.unwrap().1.clone().to_basic_value_enum(
|
||||
ctx,
|
||||
generator,
|
||||
expect_ty,
|
||||
)?;
|
||||
let obj_val = obj.unwrap().1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
if let BasicValueEnum::PointerValue(ptr) = obj_val {
|
||||
Ok(Some(ctx.builder.build_is_not_null(ptr, "is_some").into()))
|
||||
} else {
|
||||
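The `is_some`/`is_none` builtin callbacks above both reduce to a null check on the option's pointer representation. A sketch of the `is_some` case; the wrapper name is illustrative and `is_none` is the same with `build_is_null`:

```rust
use inkwell::{builder::Builder, values::BasicValueEnum};

/// Sketch: Option.is_some() lowers to a not-null check on the option pointer.
fn option_is_some<'ctx>(
    builder: &Builder<'ctx>,
    obj_val: BasicValueEnum<'ctx>,
) -> BasicValueEnum<'ctx> {
    if let BasicValueEnum::PointerValue(ptr) = obj_val {
        builder.build_is_not_null(ptr, "is_some").into()
    } else {
        unreachable!("option must be a pointer value")
    }
}
```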
|
@ -252,12 +244,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, obj, _, _, generator| {
|
||||
let expect_ty = obj.clone().unwrap().0;
|
||||
let obj_val = obj.unwrap().1.clone().to_basic_value_enum(
|
||||
ctx,
|
||||
generator,
|
||||
expect_ty,
|
||||
)?;
|
||||
let obj_val = obj.unwrap().1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
if let BasicValueEnum::PointerValue(ptr) = obj_val {
|
||||
Ok(Some(ctx.builder.build_is_null(ptr, "is_none").into()))
|
||||
} else {
|
||||
|
@ -303,7 +290,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let float = ctx.primitives.float;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
Ok(if ctx.unifier.unioned(arg_ty, boolean) {
|
||||
Some(
|
||||
ctx.builder
|
||||
|
@ -368,7 +355,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let float = ctx.primitives.float;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
Ok(
|
||||
if ctx.unifier.unioned(arg_ty, boolean)
|
||||
|| ctx.unifier.unioned(arg_ty, uint32)
|
||||
|
@ -435,7 +422,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let float = ctx.primitives.float;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let res = if ctx.unifier.unioned(arg_ty, boolean) {
|
||||
ctx.builder
|
||||
.build_int_z_extend(arg.into_int_value(), ctx.ctx.i64_type(), "zext")
|
||||
|
@ -487,7 +474,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let float = ctx.primitives.float;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let res = if ctx.unifier.unioned(arg_ty, int32)
|
||||
|| ctx.unifier.unioned(arg_ty, uint32)
|
||||
|| ctx.unifier.unioned(arg_ty, boolean)
|
||||
|
@ -534,7 +521,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let boolean = ctx.primitives.bool;
|
||||
let float = ctx.primitives.float;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
Ok(
|
||||
if ctx.unifier.unioned(arg_ty, boolean)
|
||||
|| ctx.unifier.unioned(arg_ty, int32)
|
||||
|
@ -570,7 +557,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, _, args, generator| {
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let round_intrinsic =
|
||||
ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
|
@ -610,7 +597,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, _, args, generator| {
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let round_intrinsic =
|
||||
ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
|
@ -668,44 +655,23 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let mut step = None;
|
||||
let int32 = ctx.ctx.i32_type();
|
||||
let zero = int32.const_zero();
|
||||
let ty_i32 = ctx.primitives.int32;
|
||||
for (i, arg) in args.iter().enumerate() {
|
||||
if arg.0 == Some("start".into()) {
|
||||
start = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
|
||||
start = Some(arg.1.clone().to_basic_value_enum(ctx, generator)?);
|
||||
} else if arg.0 == Some("stop".into()) {
|
||||
stop = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
|
||||
stop = Some(arg.1.clone().to_basic_value_enum(ctx, generator)?);
|
||||
} else if arg.0 == Some("step".into()) {
|
||||
step = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
|
||||
step = Some(arg.1.clone().to_basic_value_enum(ctx, generator)?);
|
||||
} else if i == 0 {
|
||||
start = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
|
||||
start = Some(arg.1.clone().to_basic_value_enum(ctx, generator)?);
|
||||
} else if i == 1 {
|
||||
stop = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
|
||||
stop = Some(arg.1.clone().to_basic_value_enum(ctx, generator)?);
|
||||
} else if i == 2 {
|
||||
step = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
|
||||
step = Some(arg.1.clone().to_basic_value_enum(ctx, generator)?);
|
||||
}
|
||||
}
|
||||
let step = match step {
|
||||
Some(step) => {
|
||||
let step = step.into_int_value();
|
||||
// assert step != 0, throw exception if not
|
||||
let not_zero = ctx.builder.build_int_compare(
|
||||
IntPredicate::NE,
|
||||
step,
|
||||
step.get_type().const_zero(),
|
||||
"range_step_ne",
|
||||
);
|
||||
ctx.make_assert(
|
||||
generator,
|
||||
not_zero,
|
||||
"0:ValueError",
|
||||
"range() step must not be zero",
|
||||
[None, None, None],
|
||||
ctx.current_loc,
|
||||
);
|
||||
step
|
||||
}
|
||||
None => int32.const_int(1, false),
|
||||
};
|
||||
// TODO: error when step == 0
|
||||
let step = step.unwrap_or_else(|| int32.const_int(1, false).into());
|
||||
let stop = stop.unwrap_or_else(|| {
|
||||
let v = start.unwrap();
|
||||
start = None;
|
||||
|
@ -748,9 +714,8 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, fun, args, generator| {
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
Ok(Some(args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty)?))
|
||||
|ctx, _, _, args, generator| {
|
||||
Ok(Some(args[0].1.clone().to_basic_value_enum(ctx, generator)?))
|
||||
},
|
||||
)))),
|
||||
loc: None,
|
||||
|
@ -774,7 +739,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let float = ctx.primitives.float;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
Ok(if ctx.unifier.unioned(arg_ty, boolean) {
|
||||
Some(arg)
|
||||
} else if ctx.unifier.unioned(arg_ty, int32) {
|
||||
|
@ -832,7 +797,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, _, args, generator| {
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let floor_intrinsic =
|
||||
ctx.module.get_function("llvm.floor.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
|
@ -872,7 +837,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, _, args, generator| {
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let floor_intrinsic =
|
||||
ctx.module.get_function("llvm.floor.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
|
@ -912,7 +877,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, _, args, generator| {
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let ceil_intrinsic =
|
||||
ctx.module.get_function("llvm.ceil.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
|
@ -952,7 +917,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, _, args, generator| {
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let ceil_intrinsic =
|
||||
ctx.module.get_function("llvm.ceil.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
|
@ -1004,11 +969,11 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
|ctx, _, fun, args, generator| {
|
||||
let range_ty = ctx.primitives.range;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty)?;
|
||||
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
Ok(if ctx.unifier.unioned(arg_ty, range_ty) {
|
||||
let arg = arg.into_pointer_value();
|
||||
let (start, end, step) = destructure_range(ctx, arg);
|
||||
Some(calculate_len_for_slice_range(generator, ctx, start, end, step).into())
|
||||
Some(calculate_len_for_slice_range(ctx, start, end, step).into())
|
||||
} else {
|
||||
let int32 = ctx.ctx.i32_type();
|
||||
let zero = int32.const_zero();
|
||||
|
@ -1058,8 +1023,8 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let llvm_f64 = ctx.ctx.f64_type().as_basic_type_enum();
|
||||
let m_ty = fun.0.args[0].ty;
|
||||
let n_ty = fun.0.args[1].ty;
|
||||
let m_val = args[0].1.clone().to_basic_value_enum(ctx, generator, m_ty)?;
|
||||
let n_val = args[1].1.clone().to_basic_value_enum(ctx, generator, n_ty)?;
|
||||
let m_val = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let n_val = args[1].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let mut is_type = |a: Type, b: Type| ctx.unifier.unioned(a, b);
|
||||
let (fun_name, arg_ty) = if is_type(m_ty, n_ty) && is_type(n_ty, boolean) {
|
||||
("llvm.umin.i1", llvm_i1)
|
||||
|
@ -1120,8 +1085,8 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let llvm_f64 = ctx.ctx.f64_type().as_basic_type_enum();
|
||||
let m_ty = fun.0.args[0].ty;
|
||||
let n_ty = fun.0.args[1].ty;
|
||||
let m_val = args[0].1.clone().to_basic_value_enum(ctx, generator, m_ty)?;
|
||||
let n_val = args[1].1.clone().to_basic_value_enum(ctx, generator, n_ty)?;
|
||||
let m_val = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let n_val = args[1].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let mut is_type = |a: Type, b: Type| ctx.unifier.unioned(a, b);
|
||||
let (fun_name, arg_ty) = if is_type(m_ty, n_ty) && is_type(n_ty, boolean) {
|
||||
("llvm.umax.i1", llvm_i1)
|
||||
|
@ -1178,7 +1143,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
let llvm_i64 = ctx.ctx.i64_type().as_basic_type_enum();
|
||||
let llvm_f64 = ctx.ctx.f64_type().as_basic_type_enum();
|
||||
let n_ty = fun.0.args[0].ty;
|
||||
let n_val = args[0].1.clone().to_basic_value_enum(ctx, generator, n_ty)?;
|
||||
let n_val = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let mut is_type = |a: Type, b: Type| ctx.unifier.unioned(a, b);
|
||||
let mut is_float = false;
|
||||
let (fun_name, arg_ty) =
|
||||
|
@ -1235,9 +1200,8 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
|||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, fun, args, generator| {
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg_val = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty)?;
|
||||
|ctx, _, _fun, args, generator| {
|
||||
let arg_val = args[0].1.clone().to_basic_value_enum(ctx, generator)?;
|
||||
let alloca = ctx.builder.build_alloca(arg_val.get_type(), "alloca_some");
|
||||
ctx.builder.build_store(alloca, arg_val);
|
||||
Ok(Some(alloca.into()))
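The `Some`, `is_some`, and `is_none` callbacks in the hunks above treat an option value as a nullable pointer: the constructor stores the payload behind a fresh alloca, and the predicates reduce to null checks on that pointer. The following is a minimal, self-contained sketch of that representation in plain Rust; the stand-in functions are hypothetical and only mirror the shape of the LLVM IR the callbacks emit, not the nac3 API itself.

// Illustration only: plain-Rust stand-ins for the pointer-based option
// representation used by the builtin callbacks above (not the real nac3 code).

fn some_boxed(v: i32) -> *mut i32 {
    // corresponds to build_alloca + build_store in the `Some` constructor
    Box::into_raw(Box::new(v))
}

fn is_some(p: *const i32) -> bool {
    // corresponds to build_is_not_null(ptr, "is_some")
    !p.is_null()
}

fn is_none(p: *const i32) -> bool {
    // corresponds to build_is_null(ptr, "is_none")
    p.is_null()
}

fn main() {
    let none: *const i32 = std::ptr::null();
    let some = some_boxed(42);
    assert!(is_none(none) && !is_some(none));
    assert!(is_some(some) && !is_none(some));
    // free the illustration's allocation
    unsafe { drop(Box::from_raw(some)) };
}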
@ -4,7 +4,7 @@ use std::rc::Rc;
use crate::{
codegen::{expr::get_subst_key, stmt::exn_constructor},
symbol_resolver::SymbolValue,
typecheck::type_inferencer::{FunctionData, Inferencer},
typecheck::{type_inferencer::{FunctionData, Inferencer}, escape_analysis::EscapeAnalyzer},
};

use super::*;
@ -164,7 +164,6 @@ impl TopLevelComposer {
|
|||
ast: ast::Stmt<()>,
|
||||
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
||||
mod_path: String,
|
||||
allow_no_constructor: bool,
|
||||
) -> Result<(StrRef, DefinitionId, Option<Type>), String> {
|
||||
let defined_names = &mut self.defined_names;
|
||||
match &ast.node {
|
||||
|
@ -299,7 +298,7 @@ impl TopLevelComposer {
|
|||
self.definition_ast_list.push((def, Some(ast)));
|
||||
}
|
||||
|
||||
let result_ty = if allow_no_constructor || contains_constructor { Some(constructor_ty) } else { None };
|
||||
let result_ty = if contains_constructor { Some(constructor_ty) } else { None };
|
||||
Ok((class_name, DefinitionId(class_def_id), result_ty))
|
||||
}
|
||||
|
||||
|
@ -435,11 +434,11 @@ impl TopLevelComposer {
|
|||
|
||||
// check if all are unique type vars
|
||||
let all_unique_type_var = {
|
||||
let mut occurred_type_var_id: HashSet<u32> = HashSet::new();
|
||||
let mut occured_type_var_id: HashSet<u32> = HashSet::new();
|
||||
type_vars.iter().all(|x| {
|
||||
let ty = unifier.get_ty(*x);
|
||||
if let TypeEnum::TVar { id, .. } = ty.as_ref() {
|
||||
occurred_type_var_id.insert(*id)
|
||||
occured_type_var_id.insert(*id)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
@ -537,7 +536,7 @@ impl TopLevelComposer {
|
|||
}
|
||||
has_base = true;
|
||||
|
||||
// the function parse_ast_to make sure that no type var occurred in
|
||||
// the function parse_ast_to make sure that no type var occured in
|
||||
// bast_ty if it is a CustomClassKind
|
||||
let base_ty = parse_ast_to_type_annotation_kinds(
|
||||
class_resolver,
|
||||
|
@ -697,7 +696,7 @@ impl TopLevelComposer {
|
|||
return Err(errors.into_iter().sorted().join("\n----------\n"));
|
||||
}
|
||||
|
||||
// handle the inherited methods and fields
|
||||
// handle the inheritanced methods and fields
|
||||
// Note: we cannot defer error handling til the end of the loop, because there is loop
|
||||
// carried dependency, ignoring the error (temporarily) will cause all assumptions to break
|
||||
// and produce weird error messages
|
||||
|
@ -826,9 +825,9 @@ impl TopLevelComposer {
|
|||
let mut function_var_map: HashMap<u32, Type> = HashMap::new();
|
||||
let arg_types = {
|
||||
// make sure no duplicate parameter
|
||||
let mut defined_parameter_name: HashSet<_> = HashSet::new();
|
||||
let mut defined_paramter_name: HashSet<_> = HashSet::new();
|
||||
for x in args.args.iter() {
|
||||
if !defined_parameter_name.insert(x.node.arg)
|
||||
if !defined_paramter_name.insert(x.node.arg)
|
||||
|| keyword_list.contains(&x.node.arg)
|
||||
{
|
||||
return Err(format!(
|
||||
|
@ -1040,7 +1039,6 @@ impl TopLevelComposer {
|
|||
class_body_ast,
|
||||
_class_ancestor_def,
|
||||
class_fields_def,
|
||||
class_static_fields_def,
|
||||
class_methods_def,
|
||||
class_type_vars_def,
|
||||
class_resolver,
|
||||
|
@ -1048,7 +1046,6 @@ impl TopLevelComposer {
|
|||
object_id,
|
||||
ancestors,
|
||||
fields,
|
||||
static_fields,
|
||||
methods,
|
||||
resolver,
|
||||
type_vars,
|
||||
|
@ -1056,7 +1053,7 @@ impl TopLevelComposer {
|
|||
} = &mut *class_def
|
||||
{
|
||||
if let ast::StmtKind::ClassDef { name, bases, body, .. } = &class_ast {
|
||||
(*object_id, *name, bases, body, ancestors, fields, static_fields, methods, type_vars, resolver)
|
||||
(*object_id, *name, bases, body, ancestors, fields, methods, type_vars, resolver)
|
||||
} else {
|
||||
unreachable!("here must be class def ast");
|
||||
}
|
||||
|
@ -1077,10 +1074,10 @@ impl TopLevelComposer {
|
|||
|
||||
let arg_types: Vec<FuncArg> = {
|
||||
// check method parameters cannot have same name
|
||||
let mut defined_parameter_name: HashSet<_> = HashSet::new();
|
||||
let mut defined_paramter_name: HashSet<_> = HashSet::new();
|
||||
let zelf: StrRef = "self".into();
|
||||
for x in args.args.iter() {
|
||||
if !defined_parameter_name.insert(x.node.arg)
|
||||
if !defined_paramter_name.insert(x.node.arg)
|
||||
|| (keyword_list.contains(&x.node.arg) && x.node.arg != zelf)
|
||||
{
|
||||
return Err(format!(
|
||||
|
@ -1091,13 +1088,13 @@ impl TopLevelComposer {
|
|||
}
|
||||
}
|
||||
|
||||
if name == &"__init__".into() && !defined_parameter_name.contains(&zelf) {
|
||||
if name == &"__init__".into() && !defined_paramter_name.contains(&zelf) {
|
||||
return Err(format!(
|
||||
"__init__ method must have a `self` parameter (at {})",
|
||||
b.location
|
||||
));
|
||||
}
|
||||
if !defined_parameter_name.contains(&zelf) {
|
||||
if !defined_paramter_name.contains(&zelf) {
|
||||
return Err(format!(
|
||||
"class method must have a `self` parameter (at {})",
|
||||
b.location
|
||||
|
@ -1230,7 +1227,7 @@ impl TopLevelComposer {
|
|||
dummy_return_type
|
||||
} else {
|
||||
// if do not have return annotation, return none
|
||||
// for uniform handling, still use type annotation
|
||||
// for uniform handling, still use type annoatation
|
||||
let dummy_return_type = unifier.get_dummy_var().0;
|
||||
type_var_to_concrete_def.insert(
|
||||
dummy_return_type,
|
||||
|
@ -1270,7 +1267,7 @@ impl TopLevelComposer {
|
|||
.unify(method_dummy_ty, method_type)
|
||||
.map_err(|e| e.to_display(unifier).to_string())?;
|
||||
}
|
||||
ast::StmtKind::AnnAssign { target, annotation, value, .. } => {
|
||||
ast::StmtKind::AnnAssign { target, annotation, value: None, .. } => {
|
||||
if let ast::ExprKind::Name { id: attr, .. } = &target.node {
|
||||
if defined_fields.insert(attr.to_string()) {
|
||||
let dummy_field_type = unifier.get_dummy_var().0;
|
||||
|
@ -1296,9 +1293,6 @@ impl TopLevelComposer {
|
|||
_ if core_config.kernel_ann.is_none() => (annotation, true),
|
||||
_ => continue, // ignore fields annotated otherwise
|
||||
};
|
||||
if let Option::Some(..) = &value{
|
||||
class_static_fields_def.push((*attr, dummy_field_type, mutable));
|
||||
}
|
||||
class_fields_def.push((*attr, dummy_field_type, mutable));
|
||||
|
||||
let parsed_annotation = parse_ast_to_type_annotation_kinds(
|
||||
|
@ -1329,7 +1323,7 @@ impl TopLevelComposer {
|
|||
type_var_to_concrete_def.insert(dummy_field_type, parsed_annotation);
|
||||
} else {
|
||||
return Err(format!(
|
||||
"same class field `{}` defined twice (at {})",
|
||||
"same class fields `{}` defined twice (at {})",
|
||||
attr, target.location
|
||||
));
|
||||
}
|
||||
|
@ -1340,27 +1334,7 @@ impl TopLevelComposer {
|
|||
));
|
||||
}
|
||||
}
|
||||
ast::StmtKind::Assign { targets, value, .. } => {
|
||||
for target in targets {
|
||||
if let ast::ExprKind::Name { id: attr, .. } = &target.node {
|
||||
if defined_fields.insert(attr.to_string()) {
|
||||
let dummy_field_type = unifier.get_dummy_var().0;
|
||||
class_static_fields_def.push((*attr, dummy_field_type, true));
|
||||
class_fields_def.push((*attr, dummy_field_type, true));
|
||||
} else {
|
||||
return Err(format!(
|
||||
"same class field `{}` defined twice (at {})",
|
||||
attr, target.location
|
||||
));
|
||||
}
|
||||
} else {
|
||||
return Err(format!(
|
||||
"unsupported statement type in class definition body (at {})",
|
||||
target.location
|
||||
));
|
||||
}
|
||||
}
|
||||
},
|
||||
ast::StmtKind::Assign { .. } => {}, // we don't class attributes
|
||||
ast::StmtKind::Pass { .. } => {}
|
||||
ast::StmtKind::Expr { value: _, .. } => {} // typically a docstring; ignoring all expressions matches CPython behavior
|
||||
_ => {
|
||||
|
@ -1494,9 +1468,9 @@ impl TopLevelComposer {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// step 5, analyze and call type inferencer to fill the `instance_to_stmt` of topleveldef::function
|
||||
/// step 5, analyze and call type inferecer to fill the `instance_to_stmt` of topleveldef::function
|
||||
fn analyze_function_instance(&mut self) -> Result<(), String> {
|
||||
// first get the class constructor type correct for the following type check in function body
|
||||
// first get the class contructor type correct for the following type check in function body
|
||||
// also do class field instantiation check
|
||||
let init_str_id = "__init__".into();
|
||||
let mut definition_extension = Vec::new();
|
||||
|
@ -1541,7 +1515,6 @@ impl TopLevelComposer {
|
|||
ancestors,
|
||||
methods,
|
||||
fields,
|
||||
static_fields,
|
||||
type_vars,
|
||||
name: class_name,
|
||||
object_id,
|
||||
|
@ -1608,7 +1581,7 @@ impl TopLevelComposer {
|
|||
return Ok(());
|
||||
}
|
||||
let mut init_id: Option<DefinitionId> = None;
|
||||
// get the class constructor type correct
|
||||
// get the class contructor type correct
|
||||
let (contor_args, contor_type_vars) = {
|
||||
let mut constructor_args: Vec<FuncArg> = Vec::new();
|
||||
let mut type_vars: HashMap<u32, Type> = HashMap::new();
|
||||
|
@ -1644,11 +1617,11 @@ impl TopLevelComposer {
|
|||
unreachable!("must be init function here")
|
||||
}
|
||||
let all_inited = Self::get_all_assigned_field(body.as_slice())?;
|
||||
for f in fields {
|
||||
if !all_inited.contains(&f.0) && !static_fields.contains(&f) {
|
||||
for (f, _, _) in fields {
|
||||
if !all_inited.contains(f) {
|
||||
return Err(format!(
|
||||
"fields `{}` of class `{}` not fully initialized in the initializer (at {})",
|
||||
&f.0,
|
||||
f,
|
||||
class_name,
|
||||
body[0].location,
|
||||
));
|
||||
|
@ -1819,6 +1792,7 @@ impl TopLevelComposer {
|
|||
result
|
||||
};
|
||||
let mut calls: HashMap<CodeLocation, CallId> = HashMap::new();
|
||||
let mut args = vec![];
|
||||
let mut inferencer = Inferencer {
|
||||
top_level: ctx.as_ref(),
|
||||
defined_identifiers: identifiers.clone(),
|
||||
|
@ -1839,6 +1813,7 @@ impl TopLevelComposer {
|
|||
result.insert("self".into(), self_ty);
|
||||
}
|
||||
result.extend(inst_args.iter().map(|x| (x.name, x.ty)));
|
||||
args.extend(result.iter().map(|(&a, &b)| (a, b)));
|
||||
result
|
||||
},
|
||||
primitives: primitives_ty,
|
||||
|
@ -1933,7 +1908,7 @@ impl TopLevelComposer {
|
|||
unreachable!("must be class id here")
|
||||
}
|
||||
},
|
||||
&mut |id| format!("typevar{}", id),
|
||||
&mut |id| format!("tvar{}", id),
|
||||
&mut None,
|
||||
);
|
||||
return Err(format!(
|
||||
|
@ -1944,6 +1919,18 @@ impl TopLevelComposer {
));
}

if simple_name.to_string() != "__init__" {
EscapeAnalyzer::check_function_lifetime(
unifier,
&primitives_ty,
resolver.as_ref().unwrap().clone(),
ctx.as_ref(),
&args,
&fun_body,
ast.as_ref().unwrap().location,
).map_err(|e| format!("Escape analysis error: {}\n ... in function {}", e, name))?;
}

instance_to_stmt.insert(
get_subst_key(unifier, self_type, &subst, Some(&vars.keys().cloned().collect())),
FunInstance {
@ -84,7 +84,7 @@ impl TopLevelComposer {
obj_id: DefinitionId(7),
fields: vec![
("__name__".into(), (int32, true)),
("__file__".into(), (str, true)),
("__file__".into(), (int32, true)),
("__line__".into(), (int32, true)),
("__col__".into(), (int32, true)),
("__func__".into(), (str, true)),
@ -149,7 +149,7 @@ impl TopLevelComposer {
|
|||
}
|
||||
|
||||
/// already include the definition_id of itself inside the ancestors vector
|
||||
/// when first registering, the type_vars, fields, methods, ancestors are invalid
|
||||
/// when first regitering, the type_vars, fields, methods, ancestors are invalid
|
||||
pub fn make_top_level_class_def(
|
||||
index: usize,
|
||||
resolver: Option<Arc<dyn SymbolResolver + Send + Sync>>,
|
||||
|
@ -162,7 +162,6 @@ impl TopLevelComposer {
|
|||
object_id: DefinitionId(index),
|
||||
type_vars: Default::default(),
|
||||
fields: Default::default(),
|
||||
static_fields: Default::default(),
|
||||
methods: Default::default(),
|
||||
ancestors: Default::default(),
|
||||
constructor,
|
||||
|
|
|
@ -91,8 +91,6 @@ pub enum TopLevelDef {
|
|||
// class fields
|
||||
// name, type, is mutable
|
||||
fields: Vec<(StrRef, Type, bool)>,
|
||||
// list of static data members
|
||||
static_fields: Vec<(StrRef, Type, bool)>,
|
||||
// class methods, pointing to the corresponding function definition.
|
||||
methods: Vec<(StrRef, Type, DefinitionId)>,
|
||||
// ancestor classes, including itself.
|
||||
|
|
|
@ -9,7 +9,7 @@ expression: res_vec
|
|||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[t:T], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"A.foo\",\nsig: \"fn[[c:C], none]\",\nvar_id: []\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"B[typevar7]\", \"A[float]\"],\nfields: [\"a\", \"b\", \"c\", \"d\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\"), (\"foo\", \"fn[[c:C], none]\")],\ntype_vars: [\"typevar7\"]\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"B[var7]\", \"A[float]\"],\nfields: [\"a\", \"b\", \"c\", \"d\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\"), (\"foo\", \"fn[[c:C], none]\")],\ntype_vars: [\"var7\"]\n}\n",
|
||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"B.fun\",\nsig: \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\",\nvar_id: []\n}\n",
|
||||
"Class {\nname: \"C\",\nancestors: [\"C\", \"B[bool]\", \"A[float]\"],\nfields: [\"a\", \"b\", \"c\", \"d\", \"e\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a:int32, b:T], list[virtual[B[bool]]]]\"), (\"foo\", \"fn[[c:C], none]\")],\ntype_vars: []\n}\n",
|
||||
|
|
|
@ -5,7 +5,7 @@ expression: res_vec
|
|||
|
||||
---
|
||||
[
|
||||
"Class {\nname: \"A\",\nancestors: [\"A[typevar6, typevar7]\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[a:A[bool, float], b:B], none]\"), (\"fun\", \"fn[[a:A[bool, float]], A[bool, int32]]\")],\ntype_vars: [\"typevar6\", \"typevar7\"]\n}\n",
|
||||
"Class {\nname: \"A\",\nancestors: [\"A[var6, var7]\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[a:A[bool, float], b:B], none]\"), (\"fun\", \"fn[[a:A[bool, float]], A[bool, int32]]\")],\ntype_vars: [\"var6\", \"var7\"]\n}\n",
|
||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[a:A[bool, float], b:B], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a:A[bool, float]], A[bool, int32]]\",\nvar_id: []\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"B\", \"A[int64, bool]\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a:A[bool, float]], A[bool, int32]]\"), (\"foo\", \"fn[[b:B], B]\"), (\"bar\", \"fn[[a:A[int32, list[B]]], tuple[A[bool, virtual[A[B, int32]]], B]]\")],\ntype_vars: []\n}\n",
|
||||
|
|
|
@ -105,7 +105,7 @@ impl SymbolResolver for Resolver {
|
|||
def __init__(self):
|
||||
self.c: int32 = 4
|
||||
self.a: bool = True
|
||||
"},
|
||||
"}
|
||||
];
|
||||
"register"
|
||||
)]
|
||||
|
@ -116,26 +116,10 @@ fn test_simple_register(source: Vec<&str>) {
|
|||
let ast = parse_program(s, Default::default()).unwrap();
|
||||
let ast = ast[0].clone();
|
||||
|
||||
composer.register_top_level(ast, None, "".into(), false).unwrap();
|
||||
composer.register_top_level(ast, None, "".into()).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[test_case(
|
||||
indoc! {"
|
||||
class A:
|
||||
def foo(self):
|
||||
pass
|
||||
a = A()
|
||||
"};
|
||||
"register"
|
||||
)]
|
||||
fn test_simple_register_without_constructor(source: &str) {
|
||||
let mut composer: TopLevelComposer = Default::default();
|
||||
let ast = parse_program(source, Default::default()).unwrap();
|
||||
let ast = ast[0].clone();
|
||||
composer.register_top_level(ast, None, "".into(), true).unwrap();
|
||||
}
|
||||
|
||||
#[test_case(
|
||||
vec![
|
||||
indoc! {"
|
||||
|
@ -179,7 +163,7 @@ fn test_simple_function_analyze(source: Vec<&str>, tys: Vec<&str>, names: Vec<&s
|
|||
let ast = ast[0].clone();
|
||||
|
||||
let (id, def_id, ty) =
|
||||
composer.register_top_level(ast, Some(resolver.clone()), "".into(), false).unwrap();
|
||||
composer.register_top_level(ast, Some(resolver.clone()), "".into()).unwrap();
|
||||
internal_resolver.add_id_def(id, def_id);
|
||||
if let Some(ty) = ty {
|
||||
internal_resolver.add_id_type(id, ty);
|
||||
|
@ -531,7 +515,7 @@ fn test_analyze(source: Vec<&str>, res: Vec<&str>) {
|
|||
let ast = ast[0].clone();
|
||||
|
||||
let (id, def_id, ty) = {
|
||||
match composer.register_top_level(ast, Some(resolver.clone()), "".into(), false) {
|
||||
match composer.register_top_level(ast, Some(resolver.clone()), "".into()) {
|
||||
Ok(x) => x,
|
||||
Err(msg) => {
|
||||
if print {
|
||||
|
@ -715,7 +699,7 @@ fn test_inference(source: Vec<&str>, res: Vec<&str>) {
|
|||
let ast = ast[0].clone();
|
||||
|
||||
let (id, def_id, ty) = {
|
||||
match composer.register_top_level(ast, Some(resolver.clone()), "".into(), false) {
|
||||
match composer.register_top_level(ast, Some(resolver.clone()), "".into()) {
|
||||
Ok(x) => x,
|
||||
Err(msg) => {
|
||||
if print {
|
||||
|
@ -779,7 +763,7 @@ fn make_internal_resolver_with_tvar(
|
|||
(name, {
|
||||
let (ty, id) = unifier.get_fresh_var_with_range(range.as_slice(), None, None);
|
||||
if print {
|
||||
println!("{}: {:?}, typevar{}", name, ty, id);
|
||||
println!("{}: {:?}, tvar{}", name, ty, id);
|
||||
}
|
||||
ty
|
||||
})
|
||||
|
@ -807,7 +791,7 @@ impl<'a> Fold<Option<Type>> for TypeToStringFolder<'a> {
|
|||
self.unifier.internal_stringify(
|
||||
ty,
|
||||
&mut |id| format!("class{}", id.to_string()),
|
||||
&mut |id| format!("typevar{}", id.to_string()),
|
||||
&mut |id| format!("tvar{}", id.to_string()),
|
||||
&mut None,
|
||||
)
|
||||
} else {
|
||||
|
|
|
@ -356,7 +356,7 @@ pub fn get_type_from_type_annotation_kinds(
|
|||
unifier.internal_stringify(
|
||||
p,
|
||||
&mut |id| format!("class{}", id),
|
||||
&mut |id| format!("typevar{}", id),
|
||||
&mut |id| format!("tvar{}", id),
|
||||
&mut None
|
||||
),
|
||||
*id
|
||||
|
@ -436,7 +436,7 @@ pub fn get_type_from_type_annotation_kinds(
|
|||
/// the type of `self` should be similar to `A[T, V]`, where `T`, `V`
|
||||
/// considered to be type variables associated with the class \
|
||||
/// \
|
||||
/// But note that here we do not make a duplication of `T`, `V`, we directly
|
||||
/// But note that here we do not make a duplication of `T`, `V`, we direclty
|
||||
/// use them as they are in the TopLevelDef::Class since those in the
|
||||
/// TopLevelDef::Class.type_vars will be substitute later when seeing applications/instantiations
|
||||
/// the Type of their fields and methods will also be subst when application/instantiation
|
||||
|
|
|
@ -0,0 +1,515 @@
use slab::Slab;
use std::borrow::Cow;
use std::collections::{HashMap, HashSet};

use nac3parser::ast::{Location, StrRef};

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LifetimeKind {
Static,
NonLocal,
Unknown,
PreciseLocal,
ImpreciseLocal,
}

impl std::ops::BitAnd for LifetimeKind {
type Output = Self;

fn bitand(self, rhs: Self) -> Self::Output {
use LifetimeKind::*;
match (self, rhs) {
(x, y) if x == y => x,
(PreciseLocal, ImpreciseLocal) | (ImpreciseLocal, PreciseLocal) => ImpreciseLocal,
(Static, NonLocal) | (NonLocal, Static) => NonLocal,
_ => Unknown,
}
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct LifetimeId(usize);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct BasicBlockId(usize);
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum LifetimeIR {
|
||||
VarAssign { var: StrRef, lifetime: LifetimeId },
|
||||
VarAccess { var: StrRef },
|
||||
FieldAssign { obj: LifetimeId, field: StrRef, new: LifetimeId, is_init: bool },
|
||||
FieldAccess { obj: LifetimeId, field: StrRef },
|
||||
CreateLifetime { kind: LifetimeKind },
|
||||
PassedToFunc { param_lifetimes: Vec<LifetimeId> },
|
||||
UnifyLifetimes { lifetimes: Vec<LifetimeId> },
|
||||
Branch { targets: Vec<BasicBlockId> },
|
||||
Return { val: Option<LifetimeId> },
|
||||
}
|
||||
|
||||
pub struct LifetimeIRBuilder {
|
||||
irs: Vec<Option<(LifetimeIR, Location)>>,
|
||||
basic_blocks: Vec<Vec<usize>>,
|
||||
current_block: BasicBlockId,
|
||||
}
|
||||
|
||||
impl LifetimeIRBuilder {
|
||||
pub fn new() -> Self {
|
||||
LifetimeIRBuilder {
|
||||
irs: vec![None],
|
||||
basic_blocks: vec![vec![]],
|
||||
current_block: BasicBlockId(0),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn print_ir(&self) -> String {
|
||||
let mut lines = vec![];
|
||||
for (i, bb) in self.basic_blocks.iter().enumerate() {
|
||||
if bb.is_empty() {
|
||||
continue;
|
||||
}
|
||||
lines.push(format!("{}:", i));
|
||||
for ir in bb.iter() {
|
||||
if let Some((inst, loc)) = &self.irs[*ir] {
|
||||
lines.push(format!(" {}: {:?} ({})", *ir, inst, loc));
|
||||
}
|
||||
}
|
||||
}
|
||||
lines.join("\n")
|
||||
}
|
||||
|
||||
pub fn append_ir(&mut self, inst: LifetimeIR, loc: Location) -> LifetimeId {
|
||||
let id = self.irs.len();
|
||||
self.irs.push(Some((inst, loc)));
|
||||
self.basic_blocks[self.current_block.0].push(id);
|
||||
LifetimeId(id)
|
||||
}
|
||||
|
||||
pub fn append_block(&mut self) -> BasicBlockId {
|
||||
let id = self.basic_blocks.len();
|
||||
self.basic_blocks.push(vec![]);
|
||||
BasicBlockId(id)
|
||||
}
|
||||
|
||||
pub fn get_current_block(&self) -> BasicBlockId {
|
||||
self.current_block
|
||||
}
|
||||
|
||||
pub fn position_at_end(&mut self, id: BasicBlockId) {
|
||||
self.current_block = id;
|
||||
}
|
||||
|
||||
pub fn is_terminated(&self, id: BasicBlockId) -> bool {
|
||||
let bb = &self.basic_blocks[id.0];
|
||||
if bb.is_empty() {
|
||||
false
|
||||
} else {
|
||||
matches!(
|
||||
self.irs[*bb.last().unwrap()],
|
||||
Some((LifetimeIR::Return { .. }, _)) | Some((LifetimeIR::Branch { .. }, _))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn remove_empty_bb(&mut self) {
|
||||
let mut destination_mapping = HashMap::new();
|
||||
let basic_blocks = &mut self.basic_blocks;
|
||||
let irs = &mut self.irs;
|
||||
for (i, bb) in basic_blocks.iter_mut().enumerate() {
|
||||
bb.retain(|&id| irs[id].is_some());
|
||||
if bb.len() == 1 {
|
||||
let id = bb.pop().unwrap();
|
||||
let ir = irs[id].take().unwrap();
|
||||
match ir.0 {
|
||||
LifetimeIR::Branch { targets } => {
|
||||
destination_mapping.insert(i, targets);
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut buffer = HashSet::new();
|
||||
for bb in basic_blocks.iter_mut() {
|
||||
if bb.is_empty() {
|
||||
continue;
|
||||
}
|
||||
if let LifetimeIR::Branch { targets } =
|
||||
&mut irs[*bb.last().unwrap()].as_mut().unwrap().0
|
||||
{
|
||||
buffer.clear();
|
||||
let mut updated = false;
|
||||
for target in targets.iter() {
|
||||
if let Some(dest) = destination_mapping.get(&target.0) {
|
||||
buffer.extend(dest.iter().cloned());
|
||||
updated = true;
|
||||
} else {
|
||||
buffer.insert(*target);
|
||||
}
|
||||
}
|
||||
if updated {
|
||||
targets.clear();
|
||||
targets.extend(buffer.iter().cloned());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn analyze(&self) -> Result<(), String> {
|
||||
let mut analyzers = HashMap::new();
|
||||
analyzers.insert(0, (0, true, LifetimeAnalyzer::new()));
|
||||
let mut worklist = vec![0];
|
||||
while let Some(bb) = worklist.pop() {
|
||||
let (counter, updated, analyzer) = analyzers.get_mut(&bb).unwrap();
|
||||
*counter += 1;
|
||||
if *counter > 100 {
|
||||
return Err(format!("infinite loop detected at basic block {}", bb));
|
||||
}
|
||||
*updated = false;
|
||||
let mut analyzer = analyzer.clone();
|
||||
let block = &self.basic_blocks[bb];
|
||||
let ir_iter = block.iter().filter_map(|&id| {
|
||||
self.irs[id].as_ref().map(|(ir, loc)| (LifetimeId(id), ir, *loc))
|
||||
});
|
||||
if let Some(branch) = analyzer.analyze_basic_block(ir_iter)? {
|
||||
for &target in branch.iter() {
|
||||
if let Some((_, updated, successor)) = analyzers.get_mut(&target.0) {
|
||||
if successor.merge(&analyzer) && !*updated {
|
||||
// changed
|
||||
worklist.push(target.0);
|
||||
*updated = true;
|
||||
}
|
||||
} else {
|
||||
analyzers.insert(target.0, (0, true, analyzer.clone()));
|
||||
worklist.push(target.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct LifetimeStore {
|
||||
kind: LifetimeKind,
|
||||
fields: HashMap<StrRef, LifetimeId>,
|
||||
lifetimes: HashSet<LifetimeId>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LifetimeAnalyzer<'a> {
|
||||
lifetime_to_id: HashMap<LifetimeId, usize>,
|
||||
lifetime_stores: Slab<Cow<'a, LifetimeStore>>,
|
||||
variable_assignment: HashMap<StrRef, LifetimeId>,
|
||||
}
|
||||
|
||||
impl<'a> LifetimeAnalyzer<'a> {
|
||||
pub fn new() -> Self {
|
||||
let mut zelf = LifetimeAnalyzer {
|
||||
lifetime_to_id: HashMap::new(),
|
||||
lifetime_stores: Default::default(),
|
||||
variable_assignment: HashMap::new(),
|
||||
};
|
||||
zelf.add_lifetime(LifetimeId(0), LifetimeKind::Unknown);
|
||||
zelf
|
||||
}
|
||||
|
||||
pub fn merge(&mut self, other: &LifetimeAnalyzer) -> bool {
|
||||
let mut to_be_merged = other.lifetime_to_id.keys().cloned().collect::<Vec<_>>();
|
||||
let mut updated = false;
|
||||
|
||||
let mut lifetime_merge_list = vec![];
|
||||
for (&var_name, &lifetime) in other.variable_assignment.iter() {
|
||||
if let Some(&our_lifetime) = self.variable_assignment.get(&var_name) {
|
||||
if our_lifetime != lifetime {
|
||||
lifetime_merge_list.push((our_lifetime, lifetime));
|
||||
}
|
||||
} else {
|
||||
self.variable_assignment.insert(var_name, lifetime);
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
while let Some(lifetime) = to_be_merged.pop() {
|
||||
let other_store_id = *other.lifetime_to_id.get(&lifetime).unwrap();
|
||||
if let Some(&self_store_id) = self.lifetime_to_id.get(&lifetime) {
|
||||
let self_store = self.lifetime_stores.get_mut(self_store_id).unwrap();
|
||||
let other_store = other.lifetime_stores.get(other_store_id).unwrap();
|
||||
let self_store = self_store.to_mut();
|
||||
// merge them
|
||||
for (&field, &other_lifetime) in other_store.fields.iter() {
|
||||
if let Some(&self_lifetime) = self_store.fields.get(&field) {
|
||||
if self_lifetime != other_lifetime {
|
||||
lifetime_merge_list.push((self_lifetime, other_lifetime));
|
||||
}
|
||||
} else {
|
||||
self_store.fields.insert(field, other_lifetime);
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
let zelf_lifetimes = &mut self_store.lifetimes;
|
||||
for &other_lifetime in other_store.lifetimes.iter() {
|
||||
if zelf_lifetimes.insert(other_lifetime) {
|
||||
lifetime_merge_list.push((lifetime, other_lifetime));
|
||||
}
|
||||
}
|
||||
let result_kind = self_store.kind & other_store.kind;
|
||||
if self_store.kind != result_kind {
|
||||
self_store.kind = result_kind;
|
||||
}
|
||||
} else {
|
||||
let store = other.lifetime_stores.get(other_store_id).unwrap().as_ref().clone();
|
||||
let store = self.lifetime_stores.insert(Cow::Owned(store));
|
||||
self.lifetime_to_id.insert(lifetime, store);
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
for (a, b) in lifetime_merge_list.into_iter() {
|
||||
self.unify(a, b);
|
||||
}
|
||||
|
||||
updated
|
||||
}
|
||||
|
||||
pub fn add_lifetime(&mut self, lifetime: LifetimeId, kind: LifetimeKind) {
|
||||
let id = self.lifetime_stores.insert(Cow::Owned(LifetimeStore {
|
||||
kind,
|
||||
fields: HashMap::new(),
|
||||
lifetimes: [lifetime].iter().cloned().collect(),
|
||||
}));
|
||||
let old_store_id = self.lifetime_to_id.insert(lifetime, id);
|
||||
if let Some(old_store_id) = old_store_id {
|
||||
let old_lifetime_store = self.lifetime_stores.get_mut(old_store_id).unwrap().to_mut();
|
||||
old_lifetime_store.lifetimes.remove(&lifetime);
|
||||
if old_lifetime_store.lifetimes.is_empty() {
|
||||
self.lifetime_stores.remove(old_store_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_lifetime(&mut self, lifetime: LifetimeId, to: LifetimeId) {
|
||||
let id = *self.lifetime_to_id.get(&to).unwrap();
|
||||
let store = self.lifetime_stores.get_mut(id).unwrap();
|
||||
store.to_mut().lifetimes.insert(lifetime);
|
||||
let old_store_id = self.lifetime_to_id.insert(lifetime, id);
|
||||
if let Some(old_store_id) = old_store_id {
|
||||
let old_lifetime_store = self.lifetime_stores.get_mut(old_store_id).unwrap().to_mut();
|
||||
old_lifetime_store.lifetimes.remove(&lifetime);
|
||||
if old_lifetime_store.lifetimes.is_empty() {
|
||||
self.lifetime_stores.remove(old_store_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn unify(&mut self, lhs: LifetimeId, rhs: LifetimeId) {
|
||||
use LifetimeKind::{ImpreciseLocal, PreciseLocal};
|
||||
let lhs_id = *self.lifetime_to_id.get(&lhs).unwrap();
|
||||
let rhs_id = *self.lifetime_to_id.get(&rhs).unwrap();
|
||||
if lhs_id == rhs_id {
|
||||
return;
|
||||
}
|
||||
let lhs_store = self.lifetime_stores.get(lhs_id).unwrap();
|
||||
let rhs_store = self.lifetime_stores.get(rhs_id).unwrap();
|
||||
let all_lifetimes: HashSet<_> =
|
||||
lhs_store.lifetimes.union(&rhs_store.lifetimes).cloned().collect();
|
||||
let result_kind = lhs_store.kind & rhs_store.kind;
|
||||
let fields = if matches!(result_kind, PreciseLocal | ImpreciseLocal) {
|
||||
let mut need_union = vec![];
|
||||
let mut fields = lhs_store.fields.clone();
|
||||
for (k, v) in rhs_store.fields.iter() {
|
||||
if let Some(old) = fields.insert(*k, *v) {
|
||||
need_union.push((old, *v));
|
||||
}
|
||||
}
|
||||
drop(lhs_store);
|
||||
drop(rhs_store);
|
||||
for (lhs, rhs) in need_union {
|
||||
self.unify(lhs, rhs);
|
||||
}
|
||||
fields
|
||||
} else {
|
||||
Default::default()
|
||||
};
|
||||
// unify them, slow
|
||||
for lifetime in all_lifetimes.iter() {
|
||||
self.lifetime_to_id.insert(*lifetime, lhs_id);
|
||||
}
|
||||
*self.lifetime_stores.get_mut(lhs_id).unwrap() =
|
||||
Cow::Owned(LifetimeStore { kind: result_kind, fields, lifetimes: all_lifetimes });
|
||||
self.lifetime_stores.remove(rhs_id);
|
||||
}
|
||||
|
||||
fn get_field_lifetime(&mut self, obj: LifetimeId, field: StrRef) -> LifetimeId {
|
||||
use LifetimeKind::*;
|
||||
let id = *self.lifetime_to_id.get(&obj).unwrap();
|
||||
let store = self.lifetime_stores.get(id).unwrap();
|
||||
if matches!(store.kind, PreciseLocal | ImpreciseLocal) {
|
||||
if let Some(&lifetime) = store.fields.get(&field) {
|
||||
let field_lifetime_kind = self.get_lifetime_kind(lifetime);
|
||||
if field_lifetime_kind == PreciseLocal
|
||||
&& (store.kind == ImpreciseLocal || field == "$elem".into())
|
||||
{
|
||||
let id = *self.lifetime_to_id.get(&lifetime).unwrap();
|
||||
self.lifetime_stores.get_mut(id).unwrap().to_mut().kind = ImpreciseLocal;
|
||||
}
|
||||
lifetime
|
||||
} else {
|
||||
LifetimeId(0)
|
||||
}
|
||||
} else {
|
||||
obj
|
||||
}
|
||||
}
|
||||
|
||||
fn set_field_lifetime(
|
||||
&mut self,
|
||||
obj: LifetimeId,
|
||||
field: StrRef,
|
||||
field_lifetime: LifetimeId,
|
||||
is_init: bool,
|
||||
) -> Result<(), String> {
|
||||
use LifetimeKind::*;
|
||||
let obj_id = *self.lifetime_to_id.get(&obj).unwrap();
|
||||
let field_id = *self.lifetime_to_id.get(&field_lifetime).unwrap();
|
||||
let field_lifetime_kind = self.lifetime_stores.get(field_id).unwrap().kind;
|
||||
let obj_store = self.lifetime_stores.get_mut(obj_id).unwrap();
|
||||
if !matches!(
|
||||
(obj_store.kind, field_lifetime_kind),
|
||||
(PreciseLocal, _) | (ImpreciseLocal, _) | (_, Static)
|
||||
) {
|
||||
return Err("field lifetime error".into());
|
||||
}
|
||||
match obj_store.kind {
|
||||
// $elem means list elements
|
||||
PreciseLocal if field != "$elem".into() => {
|
||||
// strong update
|
||||
obj_store.to_mut().fields.insert(field, field_lifetime);
|
||||
}
|
||||
PreciseLocal | ImpreciseLocal => {
|
||||
// weak update
|
||||
let old_lifetime = obj_store.to_mut().fields.get(&field).copied();
|
||||
if let Some(old_lifetime) = old_lifetime {
|
||||
self.unify(old_lifetime, field_lifetime);
|
||||
} else {
|
||||
obj_store.to_mut().fields.insert(field, field_lifetime);
|
||||
if !is_init {
|
||||
// unify with unknown lifetime
|
||||
self.unify(LifetimeId(0), field_lifetime);
|
||||
}
|
||||
if field == "$elem".into() {
|
||||
let field_lifetime_id = *self.lifetime_to_id.get(&field_lifetime).unwrap();
|
||||
let field_lifetime = self.lifetime_stores.get_mut(field_lifetime_id).unwrap();
|
||||
if field_lifetime.kind == PreciseLocal {
|
||||
field_lifetime.to_mut().kind = ImpreciseLocal;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_lifetime_kind(&self, lifetime: LifetimeId) -> LifetimeKind {
|
||||
self.lifetime_stores.get(*self.lifetime_to_id.get(&lifetime).unwrap()).unwrap().kind
|
||||
}
|
||||
|
||||
fn pass_function_params(&mut self, lifetimes: &[LifetimeId]) {
|
||||
use LifetimeKind::*;
|
||||
let mut visited = HashSet::new();
|
||||
let mut worklist = vec![];
|
||||
|
||||
fn add_fields_to_worklist(
|
||||
visited: &mut HashSet<LifetimeId>,
|
||||
worklist: &mut Vec<(LifetimeId, bool)>,
|
||||
fields: &HashMap<StrRef, LifetimeId>,
|
||||
) {
|
||||
for (&name, &field) in fields.iter() {
|
||||
if visited.insert(field) {
|
||||
// not visited previously
|
||||
let name = name.to_string();
|
||||
let mutable = !(name.starts_with("$elem") && name.len() != "$elem".len());
|
||||
worklist.push((field, mutable));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for lifetime in lifetimes.iter() {
|
||||
let lifetime =
|
||||
self.lifetime_stores.get_mut(*self.lifetime_to_id.get(lifetime).unwrap()).unwrap();
|
||||
add_fields_to_worklist(&mut visited, &mut worklist, &lifetime.fields);
|
||||
}
|
||||
while let Some((item, mutable)) = worklist.pop() {
|
||||
let lifetime =
|
||||
self.lifetime_stores.get_mut(*self.lifetime_to_id.get(&item).unwrap()).unwrap();
|
||||
if matches!(lifetime.kind, Unknown | Static) {
|
||||
continue;
|
||||
}
|
||||
add_fields_to_worklist(&mut visited, &mut worklist, &lifetime.fields);
|
||||
if mutable {
|
||||
// we may assign values with static lifetime to function params
|
||||
lifetime.to_mut().kind = lifetime.kind & Static;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn analyze_basic_block<'b, I: Iterator<Item = (LifetimeId, &'b LifetimeIR, Location)>>(
|
||||
&mut self,
|
||||
instructions: I,
|
||||
) -> Result<Option<&'b [BasicBlockId]>, String> {
|
||||
use LifetimeIR::*;
|
||||
for (id, inst, loc) in instructions {
|
||||
match inst {
|
||||
VarAssign { var, lifetime } => {
|
||||
self.variable_assignment.insert(*var, *lifetime);
|
||||
}
|
||||
VarAccess { var } => {
|
||||
let lifetime = self.variable_assignment.get(var).cloned();
|
||||
if let Some(lifetime) = lifetime {
|
||||
self.set_lifetime(id, lifetime);
|
||||
} else {
|
||||
// should be static lifetime
|
||||
self.add_lifetime(id, LifetimeKind::Static)
|
||||
}
|
||||
}
|
||||
FieldAssign { obj, field, new, is_init } => {
|
||||
self.set_field_lifetime(*obj, *field, *new, *is_init)
|
||||
.map_err(|e| format!("{} in {}", e, loc))?;
|
||||
}
|
||||
FieldAccess { obj, field } => {
|
||||
let lifetime = self.get_field_lifetime(*obj, *field);
|
||||
self.set_lifetime(id, lifetime);
|
||||
}
|
||||
CreateLifetime { kind } => {
|
||||
if *kind == LifetimeKind::Unknown {
|
||||
self.set_lifetime(id, LifetimeId(0));
|
||||
} else {
|
||||
self.add_lifetime(id, *kind);
|
||||
}
|
||||
}
|
||||
PassedToFunc { param_lifetimes } => {
|
||||
self.pass_function_params(param_lifetimes);
|
||||
}
|
||||
UnifyLifetimes { lifetimes } => {
|
||||
assert!(!lifetimes.is_empty());
|
||||
let lhs = lifetimes[0];
|
||||
for rhs in lifetimes[1..].iter() {
|
||||
self.unify(lhs, *rhs);
|
||||
}
|
||||
self.set_lifetime(id, lhs);
|
||||
}
|
||||
Return { val } => {
|
||||
if let Some(val) = val {
|
||||
let kind = self.get_lifetime_kind(*val);
|
||||
if !matches!(kind, LifetimeKind::Static | LifetimeKind::NonLocal) {
|
||||
return Err(format!("return value lifetime error in {}", loc));
|
||||
}
|
||||
}
|
||||
return Ok(None);
|
||||
}
|
||||
Branch { targets } => return Ok(Some(targets)),
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
}
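The `BitAnd` implementation on `LifetimeKind` in the new lifetime.rs above is the meet used whenever lifetimes flow together (merges at control-flow joins, `UnifyLifetimes`, weak field updates). Below is a standalone sketch of that lattice, copied from the diff, with a few illustrative assertions; the `main` function is hypothetical and exists only to demonstrate the behaviour.

// Illustration only: a standalone copy of the LifetimeKind meet from the
// new lifetime.rs above, plus assertions showing how kinds combine.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LifetimeKind {
    Static,
    NonLocal,
    Unknown,
    PreciseLocal,
    ImpreciseLocal,
}

impl std::ops::BitAnd for LifetimeKind {
    type Output = Self;
    fn bitand(self, rhs: Self) -> Self {
        use LifetimeKind::*;
        match (self, rhs) {
            (x, y) if x == y => x,
            (PreciseLocal, ImpreciseLocal) | (ImpreciseLocal, PreciseLocal) => ImpreciseLocal,
            (Static, NonLocal) | (NonLocal, Static) => NonLocal,
            _ => Unknown,
        }
    }
}

fn main() {
    use LifetimeKind::*;
    // merging the two local flavours stays local but loses precision
    assert_eq!(PreciseLocal & ImpreciseLocal, ImpreciseLocal);
    // merging a static value with a non-local value keeps the non-local guarantee
    assert_eq!(Static & NonLocal, NonLocal);
    // merging a local allocation with a static value gives no usable guarantee
    assert_eq!(PreciseLocal & Static, Unknown);
    // the meet is idempotent
    assert_eq!(NonLocal & NonLocal, NonLocal);
}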
@ -0,0 +1,580 @@
use std::sync::Arc;

use itertools::chain;
use nac3parser::ast::{Comprehension, Constant, Expr, ExprKind, Location, Stmt, StmtKind, StrRef};

use lifetime::{BasicBlockId, LifetimeIR, LifetimeIRBuilder, LifetimeId, LifetimeKind};

use crate::{
symbol_resolver::SymbolResolver,
toplevel::{TopLevelContext, TopLevelDef},
};

use super::{
type_inferencer::PrimitiveStore,
typedef::{Type, TypeEnum, Unifier},
};

#[cfg(test)]
mod test;

mod lifetime;

pub struct EscapeAnalyzer<'a> {
|
||||
builder: LifetimeIRBuilder,
|
||||
loop_head: Option<BasicBlockId>,
|
||||
loop_tail: Option<BasicBlockId>,
|
||||
unifier: &'a mut Unifier,
|
||||
primitive_store: &'a PrimitiveStore,
|
||||
resolver: Arc<dyn SymbolResolver + Send + Sync>,
|
||||
top_level: &'a TopLevelContext,
|
||||
}
|
||||
|
||||
impl<'a> EscapeAnalyzer<'a> {
|
||||
pub fn new(
|
||||
unifier: &'a mut Unifier,
|
||||
primitive_store: &'a PrimitiveStore,
|
||||
resolver: Arc<dyn SymbolResolver + Send + Sync>,
|
||||
top_level: &'a TopLevelContext,
|
||||
) -> Self {
|
||||
Self {
|
||||
builder: LifetimeIRBuilder::new(),
|
||||
loop_head: None,
|
||||
loop_tail: None,
|
||||
primitive_store,
|
||||
unifier,
|
||||
resolver,
|
||||
top_level,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn check_function_lifetime(
|
||||
unifier: &'a mut Unifier,
|
||||
primitive_store: &'a PrimitiveStore,
|
||||
resolver: Arc<dyn SymbolResolver + Send + Sync>,
|
||||
top_level: &'a TopLevelContext,
|
||||
args: &[(StrRef, Type)],
|
||||
body: &[Stmt<Option<Type>>],
|
||||
loc: Location,
|
||||
) -> Result<(), String> {
|
||||
use LifetimeIR::{CreateLifetime, VarAssign};
|
||||
let mut zelf = Self::new(unifier, primitive_store, resolver, top_level);
|
||||
let nonlocal_lifetime =
|
||||
zelf.builder.append_ir(CreateLifetime { kind: LifetimeKind::NonLocal }, loc);
|
||||
for (name, ty) in args.iter().copied() {
|
||||
if zelf.need_alloca(ty) {
|
||||
zelf.builder.append_ir(VarAssign { var: name, lifetime: nonlocal_lifetime }, loc);
|
||||
}
|
||||
}
|
||||
zelf.handle_statements(body)?;
|
||||
zelf.builder.remove_empty_bb();
|
||||
zelf.builder.analyze().map_err(|e| {
|
||||
format!("{}\nIR: {}", e, zelf.builder.print_ir())
|
||||
})
|
||||
}
|
||||
|
||||
fn need_alloca(&mut self, ty: Type) -> bool {
|
||||
!(self.unifier.unioned(ty, self.primitive_store.int32)
|
||||
|| self.unifier.unioned(ty, self.primitive_store.int64)
|
||||
|| self.unifier.unioned(ty, self.primitive_store.uint32)
|
||||
|| self.unifier.unioned(ty, self.primitive_store.uint64)
|
||||
|| self.unifier.unioned(ty, self.primitive_store.float)
|
||||
|| self.unifier.unioned(ty, self.primitive_store.bool)
|
||||
|| self.unifier.unioned(ty, self.primitive_store.none)
|
||||
|| self.unifier.unioned(ty, self.primitive_store.range))
|
||||
}
|
||||
|
||||
fn is_terminated(&self) -> bool {
|
||||
self.builder.is_terminated(self.builder.get_current_block())
|
||||
}
|
||||
|
||||
fn handle_unknown_function_call<P: std::borrow::Borrow<Expr<Option<Type>>>>(
|
||||
&mut self,
|
||||
params: &[P],
|
||||
ret_need_alloca: bool,
|
||||
loc: Location,
|
||||
) -> Result<Option<LifetimeId>, String> {
|
||||
let param_lifetimes = params
|
||||
.iter()
|
||||
.filter_map(|p| self.handle_expr(p.borrow()).transpose())
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
self.builder.append_ir(LifetimeIR::PassedToFunc { param_lifetimes }, loc);
|
||||
if ret_need_alloca {
|
||||
Ok(Some(
|
||||
self.builder
|
||||
.append_ir(LifetimeIR::CreateLifetime { kind: LifetimeKind::Unknown }, loc),
|
||||
))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_expr(&mut self, expr: &Expr<Option<Type>>) -> Result<Option<LifetimeId>, String> {
|
||||
use LifetimeIR::*;
|
||||
use LifetimeKind::*;
|
||||
let need_alloca = self.need_alloca(expr.custom.unwrap());
|
||||
let loc = expr.location;
|
||||
Ok(match &expr.node {
|
||||
ExprKind::Name { id, .. } => {
|
||||
if need_alloca {
|
||||
Some(self.builder.append_ir(VarAccess { var: *id }, loc))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
ExprKind::Attribute { value, attr, .. } => {
|
||||
if need_alloca {
|
||||
let val = self.handle_expr(value)?.unwrap();
|
||||
Some(self.builder.append_ir(FieldAccess { obj: val, field: *attr }, loc))
|
||||
} else {
|
||||
self.handle_expr(value)?;
|
||||
None
|
||||
}
|
||||
}
|
||||
ExprKind::Constant { .. } => {
|
||||
if need_alloca {
|
||||
Some(self.builder.append_ir(CreateLifetime { kind: Static }, loc))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
ExprKind::List { elts, .. } => {
|
||||
let elems =
|
||||
elts.iter().map(|e| self.handle_expr(e)).collect::<Result<Vec<_>, _>>()?;
|
||||
let list_lifetime =
|
||||
self.builder.append_ir(CreateLifetime { kind: PreciseLocal }, loc);
|
||||
if !elems.is_empty() {
|
||||
if elems[0].is_some() {
|
||||
let elems = elems.into_iter().map(|e| e.unwrap()).collect::<Vec<_>>();
|
||||
let elem_lifetime =
|
||||
self.builder.append_ir(UnifyLifetimes { lifetimes: elems }, loc);
|
||||
self.builder.append_ir(
|
||||
FieldAssign {
|
||||
obj: list_lifetime,
|
||||
field: "$elem".into(),
|
||||
new: elem_lifetime,
|
||||
is_init: true,
|
||||
},
|
||||
loc,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
let elem_lifetime =
|
||||
self.builder.append_ir(CreateLifetime { kind: PreciseLocal }, loc);
|
||||
self.builder.append_ir(
|
||||
FieldAssign {
|
||||
obj: list_lifetime,
|
||||
field: "$elem".into(),
|
||||
new: elem_lifetime,
|
||||
is_init: true,
|
||||
},
|
||||
loc,
|
||||
);
|
||||
}
|
||||
Some(list_lifetime)
|
||||
}
|
||||
ExprKind::Tuple { elts, .. } => {
|
||||
let elems =
|
||||
elts.iter().map(|e| self.handle_expr(e)).collect::<Result<Vec<_>, _>>()?;
|
||||
let tuple_lifetime =
|
||||
self.builder.append_ir(CreateLifetime { kind: PreciseLocal }, loc);
|
||||
for (i, lifetime) in elems.into_iter().enumerate() {
|
||||
if let Some(lifetime) = lifetime {
|
||||
self.builder.append_ir(
|
||||
FieldAssign {
|
||||
obj: tuple_lifetime,
|
||||
field: format!("$elem{}", i).into(),
|
||||
new: lifetime,
|
||||
is_init: true,
|
||||
},
|
||||
loc,
|
||||
);
|
||||
}
|
||||
}
|
||||
Some(tuple_lifetime)
|
||||
}
|
||||
ExprKind::Subscript { value, slice, .. } => {
|
||||
let value_lifetime = self.handle_expr(value)?.unwrap();
|
||||
match &slice.node {
|
||||
ExprKind::Slice { lower, upper, step } => {
|
||||
for expr in [lower, upper, step].iter().filter_map(|x| x.as_ref()) {
|
||||
self.handle_expr(expr)?;
|
||||
}
|
||||
let slice_lifetime =
|
||||
self.builder.append_ir(CreateLifetime { kind: PreciseLocal }, loc);
|
||||
let slice_elem = self.builder.append_ir(
|
||||
FieldAccess { obj: value_lifetime, field: "$elem".into() },
|
||||
loc,
|
||||
);
|
||||
self.builder.append_ir(
|
||||
FieldAssign {
|
||||
obj: slice_lifetime,
|
||||
field: "$elem".into(),
|
||||
new: slice_elem,
|
||||
is_init: true
|
||||
},
|
||||
loc,
|
||||
);
|
||||
Some(slice_lifetime)
|
||||
}
|
||||
ExprKind::Constant { value: Constant::Int(v), .. }
|
||||
if matches!(
|
||||
&*self.unifier.get_ty(value.custom.unwrap()),
|
||||
TypeEnum::TTuple { .. }
|
||||
) =>
|
||||
{
|
||||
Some(self.builder.append_ir(
|
||||
FieldAccess {
|
||||
obj: value_lifetime,
|
||||
field: format!("$elem{}", v).into(),
|
||||
},
|
||||
loc,
|
||||
))
|
||||
}
|
||||
_ => {
|
||||
self.handle_expr(slice)?;
|
||||
if need_alloca {
|
||||
Some(self.builder.append_ir(
|
||||
FieldAccess { obj: value_lifetime, field: "$elem".into() },
|
||||
loc,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ExprKind::Call { func, args, keywords } => {
|
||||
let mut lifetimes = vec![];
|
||||
for arg in chain!(args.iter(), keywords.iter().map(|k| k.node.value.as_ref())) {
|
||||
if let Some(lifetime) = self.handle_expr(arg)? {
|
||||
lifetimes.push(lifetime);
|
||||
}
|
||||
}
|
||||
match &func.node {
|
||||
ExprKind::Name { id, .. } => {
|
||||
if !lifetimes.is_empty() {
|
||||
self.builder.append_ir(PassedToFunc { param_lifetimes: lifetimes }, loc);
|
||||
}
|
||||
if need_alloca {
|
||||
let id = self
|
||||
.resolver
|
||||
.get_identifier_def(*id)
|
||||
.map_err(|e| format!("{} (at {})", e, func.location))?;
|
||||
if let TopLevelDef::Class { .. } =
|
||||
&*self.top_level.definitions.read()[id.0].read()
|
||||
{
|
||||
Some(
|
||||
self.builder
|
||||
.append_ir(CreateLifetime { kind: PreciseLocal }, loc),
|
||||
)
|
||||
} else {
|
||||
Some(self.builder.append_ir(CreateLifetime { kind: Unknown }, loc))
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
ExprKind::Attribute { value, .. } => {
|
||||
let obj_lifetime = self.handle_expr(value)?.unwrap();
|
||||
lifetimes.push(obj_lifetime);
|
||||
self.builder.append_ir(PassedToFunc { param_lifetimes: lifetimes }, loc);
|
||||
if need_alloca {
|
||||
Some(self.builder.append_ir(CreateLifetime { kind: Unknown }, loc))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
ExprKind::BinOp { left, right, .. } => self.handle_unknown_function_call(
|
||||
&[left.as_ref(), right.as_ref()],
|
||||
need_alloca,
|
||||
loc,
|
||||
)?,
|
||||
ExprKind::BoolOp { values, .. } => {
|
||||
self.handle_unknown_function_call(&values, need_alloca, loc)?
|
||||
}
|
||||
ExprKind::UnaryOp { operand, .. } => {
|
||||
self.handle_unknown_function_call(&[operand.as_ref()], need_alloca, loc)?
|
||||
}
|
||||
ExprKind::Compare { left, comparators, .. } => {
|
||||
self.handle_unknown_function_call(&[left.as_ref()], false, loc)?;
|
||||
self.handle_unknown_function_call(&comparators, need_alloca, loc)?
|
||||
}
|
||||
ExprKind::IfExp { test, body, orelse } => {
|
||||
self.handle_expr(test)?;
|
||||
let body_bb = self.builder.append_block();
|
||||
let else_bb = self.builder.append_block();
|
||||
let tail_bb = self.builder.append_block();
|
||||
self.builder.append_ir(Branch { targets: vec![body_bb, else_bb] }, test.location);
|
||||
self.builder.position_at_end(body_bb);
|
||||
let body_lifetime = self.handle_expr(body)?;
|
||||
self.builder.append_ir(Branch { targets: vec![tail_bb] }, body.location);
|
||||
self.builder.position_at_end(else_bb);
|
||||
let else_lifetime = self.handle_expr(orelse)?;
|
||||
self.builder.append_ir(Branch { targets: vec![tail_bb] }, orelse.location);
|
||||
self.builder.position_at_end(tail_bb);
|
||||
if let (Some(body_lifetime), Some(else_lifetime)) = (body_lifetime, else_lifetime) {
|
||||
Some(self.builder.append_ir(
|
||||
UnifyLifetimes { lifetimes: vec![body_lifetime, else_lifetime] },
|
||||
loc,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
ExprKind::ListComp { elt, generators } => {
|
||||
let Comprehension { target, iter, ifs, .. } = &generators[0];
|
||||
let list_lifetime =
|
||||
self.builder.append_ir(CreateLifetime { kind: PreciseLocal }, loc);
|
||||
let iter_elem_lifetime = self.handle_expr(iter)?.map(|obj| {
|
||||
self.builder
|
||||
.append_ir(FieldAccess { obj, field: "$elem".into() }, iter.location)
|
||||
});
|
||||
let loop_body = self.builder.append_block();
|
||||
let loop_tail = self.builder.append_block();
|
||||
self.builder.append_ir(Branch { targets: vec![loop_body] }, loc);
|
||||
self.builder.position_at_end(loop_body);
|
||||
self.handle_assignment(target, iter_elem_lifetime)?;
|
||||
for ifexpr in ifs.iter() {
|
||||
self.handle_expr(ifexpr)?;
|
||||
}
|
||||
let elem_lifetime = self.handle_expr(elt)?;
|
||||
if let Some(elem_lifetime) = elem_lifetime {
|
||||
self.builder.append_ir(
|
||||
FieldAssign {
|
||||
obj: list_lifetime,
|
||||
field: "$elem".into(),
|
||||
new: elem_lifetime,
|
||||
is_init: true
|
||||
},
|
||||
elt.location,
|
||||
);
|
||||
}
|
||||
self.builder.append_ir(Branch { targets: vec![loop_body, loop_tail] }, loc);
|
||||
self.builder.position_at_end(loop_tail);
|
||||
Some(list_lifetime)
|
||||
}
|
||||
_ => unimplemented!(),
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_assignment(
|
||||
&mut self,
|
||||
lhs: &Expr<Option<Type>>,
|
||||
rhs_lifetime: Option<LifetimeId>,
|
||||
) -> Result<(), String> {
|
||||
use LifetimeIR::*;
|
||||
match &lhs.node {
|
||||
ExprKind::Attribute { value, attr, .. } => {
|
||||
let value_lifetime = self.handle_expr(value)?.unwrap();
|
||||
if let Some(field_lifetime) = rhs_lifetime {
|
||||
self.builder.append_ir(
|
||||
FieldAssign { obj: value_lifetime, field: *attr, new: field_lifetime, is_init: false },
|
||||
lhs.location,
|
||||
);
|
||||
}
|
||||
}
|
||||
ExprKind::Subscript { value, slice, .. } => {
|
||||
let value_lifetime = self.handle_expr(value)?.unwrap();
|
||||
let elem_lifetime = if let ExprKind::Slice { lower, upper, step } = &slice.node {
|
||||
for expr in [lower, upper, step].iter().filter_map(|x| x.as_ref()) {
|
||||
self.handle_expr(expr)?;
|
||||
}
|
||||
if let Some(rhs_lifetime) = rhs_lifetime {
|
||||
// must be a list
|
||||
Some(self.builder.append_ir(
|
||||
FieldAccess { obj: rhs_lifetime, field: "$elem".into() },
|
||||
lhs.location,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
self.handle_expr(slice)?;
|
||||
rhs_lifetime
|
||||
};
|
||||
// must be a list
|
||||
if let Some(elem_lifetime) = elem_lifetime {
|
||||
self.builder.append_ir(
|
||||
FieldAssign {
|
||||
obj: value_lifetime,
|
||||
field: "$elem".into(),
|
||||
new: elem_lifetime,
|
||||
is_init: false
|
||||
},
|
||||
lhs.location,
|
||||
);
|
||||
}
|
||||
}
|
||||
ExprKind::Name { id, .. } => {
|
||||
if let Some(lifetime) = rhs_lifetime {
|
||||
self.builder.append_ir(VarAssign { var: *id, lifetime }, lhs.location);
|
||||
}
|
||||
}
|
||||
ExprKind::Tuple { elts, .. } => {
|
||||
let rhs_lifetime = rhs_lifetime.unwrap();
|
||||
for (i, e) in elts.iter().enumerate() {
|
||||
let elem_lifetime = self.builder.append_ir(
|
||||
FieldAccess { obj: rhs_lifetime, field: format!("$elem{}", i).into() },
|
||||
e.location,
|
||||
);
|
||||
self.handle_assignment(e, Some(elem_lifetime))?;
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_statement(&mut self, stmt: &Stmt<Option<Type>>) -> Result<(), String> {
|
||||
use LifetimeIR::*;
|
||||
match &stmt.node {
|
||||
StmtKind::Expr { value, .. } => {
|
||||
self.handle_expr(value)?;
|
||||
}
|
||||
StmtKind::Assign { targets, value, .. } => {
|
||||
let rhs_lifetime = self.handle_expr(value)?;
|
||||
for target in targets {
|
||||
self.handle_assignment(target, rhs_lifetime)?;
|
||||
}
|
||||
}
|
||||
StmtKind::If { test, body, orelse, .. } => {
|
||||
// test should return bool
|
||||
self.handle_expr(test)?;
|
||||
let body_bb = self.builder.append_block();
|
||||
let else_bb = self.builder.append_block();
|
||||
self.builder.append_ir(Branch { targets: vec![body_bb, else_bb] }, stmt.location);
|
||||
self.builder.position_at_end(body_bb);
|
||||
self.handle_statements(&body)?;
|
||||
let body_terminated = self.is_terminated();
|
||||
if orelse.is_empty() {
|
||||
if !body_terminated {
|
||||
// else_bb is the basic block after this if statement
|
||||
self.builder.append_ir(Branch { targets: vec![else_bb] }, stmt.location);
|
||||
self.builder.position_at_end(else_bb);
|
||||
}
|
||||
} else {
|
||||
let tail_bb = self.builder.append_block();
|
||||
if !body_terminated {
|
||||
self.builder.append_ir(Branch { targets: vec![tail_bb] }, stmt.location);
|
||||
}
|
||||
self.builder.position_at_end(else_bb);
|
||||
self.handle_statements(&orelse)?;
|
||||
if !self.is_terminated() {
|
||||
self.builder.append_ir(Branch { targets: vec![tail_bb] }, stmt.location);
|
||||
}
|
||||
self.builder.position_at_end(tail_bb);
|
||||
}
|
||||
}
|
||||
StmtKind::While { test, body, orelse, .. } => {
|
||||
let old_loop_head = self.loop_head;
|
||||
let old_loop_tail = self.loop_tail;
|
||||
let loop_head = self.builder.append_block();
|
||||
let loop_body = self.builder.append_block();
|
||||
let loop_else =
|
||||
if orelse.is_empty() { None } else { Some(self.builder.append_block()) };
|
||||
let loop_tail = self.builder.append_block();
|
||||
self.loop_head = Some(loop_head);
|
||||
self.loop_tail = Some(loop_tail);
|
||||
self.builder.append_ir(Branch { targets: vec![loop_head] }, stmt.location);
|
||||
self.builder.position_at_end(loop_head);
|
||||
self.handle_expr(test)?;
|
||||
self.builder.append_ir(
|
||||
Branch { targets: vec![loop_body, loop_else.unwrap_or(loop_tail)] },
|
||||
stmt.location,
|
||||
);
|
||||
self.builder.position_at_end(loop_body);
|
||||
self.handle_statements(&body)?;
|
||||
if !self.is_terminated() {
|
||||
self.builder.append_ir(Branch { targets: vec![loop_head] }, stmt.location);
|
||||
}
|
||||
|
||||
self.loop_head = old_loop_head;
|
||||
self.loop_tail = old_loop_tail;
|
||||
if let Some(loop_else) = loop_else {
|
||||
self.builder.position_at_end(loop_else);
|
||||
self.handle_statements(&orelse)?;
|
||||
if !self.is_terminated() {
|
||||
self.builder.append_ir(Branch { targets: vec![loop_tail] }, stmt.location);
|
||||
}
|
||||
}
|
||||
self.builder.position_at_end(loop_tail);
|
||||
}
|
||||
StmtKind::For { target, iter, body, orelse, .. } => {
|
||||
let old_loop_head = self.loop_head;
|
||||
let old_loop_tail = self.loop_tail;
|
||||
let loop_head = self.builder.append_block();
|
||||
let loop_body = self.builder.append_block();
|
||||
let loop_else =
|
||||
if orelse.is_empty() { None } else { Some(self.builder.append_block()) };
|
||||
let loop_tail = self.builder.append_block();
|
||||
self.loop_head = Some(loop_head);
|
||||
self.loop_tail = Some(loop_tail);
|
||||
let iter_lifetime = self.handle_expr(iter)?.map(|obj| {
|
||||
self.builder
|
||||
.append_ir(FieldAccess { obj, field: "$elem".into() }, iter.location)
|
||||
});
|
||||
self.builder.append_ir(Branch { targets: vec![loop_head] }, stmt.location);
|
||||
self.builder.position_at_end(loop_head);
|
||||
if let Some(iter_lifetime) = iter_lifetime {
|
||||
self.handle_assignment(target, Some(iter_lifetime))?;
|
||||
}
|
||||
self.builder.append_ir(
|
||||
Branch { targets: vec![loop_body, loop_else.unwrap_or(loop_tail)] },
|
||||
stmt.location,
|
||||
);
|
||||
self.builder.position_at_end(loop_body);
|
||||
self.handle_statements(&body)?;
|
||||
if !self.is_terminated() {
|
||||
self.builder.append_ir(Branch { targets: vec![loop_head] }, stmt.location);
|
||||
}
|
||||
|
||||
self.loop_head = old_loop_head;
|
||||
self.loop_tail = old_loop_tail;
|
||||
if let Some(loop_else) = loop_else {
|
||||
self.builder.position_at_end(loop_else);
|
||||
self.handle_statements(&orelse)?;
|
||||
if !self.is_terminated() {
|
||||
self.builder.append_ir(Branch { targets: vec![loop_tail] }, stmt.location);
|
||||
}
|
||||
}
|
||||
self.builder.position_at_end(loop_tail);
|
||||
}
|
||||
|
||||
StmtKind::Continue { .. } => {
|
||||
if let Some(loop_head) = self.loop_head {
|
||||
self.builder.append_ir(Branch { targets: vec![loop_head] }, stmt.location);
|
||||
} else {
|
||||
return Err(format!("continue outside loop"));
|
||||
}
|
||||
}
|
||||
StmtKind::Break { .. } => {
|
||||
if let Some(loop_tail) = self.loop_tail {
|
||||
self.builder.append_ir(Branch { targets: vec![loop_tail] }, stmt.location);
|
||||
} else {
|
||||
return Err(format!("break outside loop"));
|
||||
}
|
||||
}
|
||||
StmtKind::Return { value, .. } => {
|
||||
let val = if let Some(value) = value { self.handle_expr(value)? } else { None };
|
||||
self.builder.append_ir(Return { val }, stmt.location);
|
||||
}
|
||||
StmtKind::Pass { .. } => {}
|
||||
_ => unimplemented!("{:?}", stmt.node),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_statements(&mut self, stmts: &[Stmt<Option<Type>>]) -> Result<(), String> {
|
||||
for stmt in stmts.iter() {
|
||||
if self.builder.is_terminated(self.builder.get_current_block()) {
|
||||
break;
|
||||
}
|
||||
self.handle_statement(stmt)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
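Pieced together from the `handle_expr` and `handle_assignment` cases above, a statement such as `c = [b]` lowers to roughly the following lifetime IR (a sketch inferred from the code shown, not an actual IR dump): a `CreateLifetime { kind: PreciseLocal }` for the fresh list, a `UnifyLifetimes` over the element lifetimes, a `FieldAssign` of the unified lifetime into the list's `$elem` field with `is_init: true`, and finally a `VarAssign` binding `c` to the list lifetime. The tests in the next file drive exactly this path: they run `handle_statements`, call `remove_empty_bb`, and expect `builder.analyze()` to report the quoted "field lifetime error" messages (apparently raised when a locally created lifetime is stored into a longer-lived object).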
|
|
@ -0,0 +1,60 @@
|
|||
use super::EscapeAnalyzer;
|
||||
use crate::typecheck::{type_inferencer::test::TestEnvironment, typedef::TypeEnum};
|
||||
use indoc::indoc;
|
||||
use nac3parser::ast::fold::Fold;
|
||||
use std::collections::hash_set::HashSet;
|
||||
use test_case::test_case;
|
||||
|
||||
use nac3parser::parser::parse_program;
|
||||
|
||||
#[test_case(indoc! {"
|
||||
# a: list[list[int32]]
|
||||
b = [1]
|
||||
a[0] = b
|
||||
"}, Err("field lifetime error in unknown: line 3 column 2".into())
|
||||
; "assign global elem")]
|
||||
#[test_case(indoc! {"
|
||||
# a: list[list[int32]]
|
||||
b = [[], []]
|
||||
b[1] = a
|
||||
b[0][0] = [0]
|
||||
"}, Err("field lifetime error in unknown: line 4 column 5".into())
|
||||
; "global unify")]
|
||||
#[test_case(indoc! {"
|
||||
b = [1, 2, 3]
|
||||
c = [a]
|
||||
c[0][0] = b
|
||||
"}, Err("field lifetime error in unknown: line 3 column 5".into())
|
||||
; "global unify 2")]
|
||||
fn test_simple(source: &str, expected_result: Result<(), String>) {
|
||||
let mut env = TestEnvironment::basic_test_env();
|
||||
let mut defined_identifiers: HashSet<_> = env.identifier_mapping.keys().cloned().collect();
|
||||
defined_identifiers.insert("a".into());
|
||||
let mut inferencer = env.get_inferencer();
|
||||
inferencer.defined_identifiers = defined_identifiers.clone();
|
||||
|
||||
let list_int = inferencer.unifier.add_ty(TypeEnum::TList { ty: inferencer.primitives.int32 });
|
||||
let list_list_int = inferencer.unifier.add_ty(TypeEnum::TList { ty: list_int });
|
||||
|
||||
inferencer.variable_mapping.insert("a".into(), list_list_int);
|
||||
let statements = parse_program(source, Default::default()).unwrap();
|
||||
let statements = statements
|
||||
.into_iter()
|
||||
.map(|v| inferencer.fold_stmt(v))
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
.unwrap();
|
||||
|
||||
inferencer.check_block(&statements, &mut defined_identifiers).unwrap();
|
||||
|
||||
let mut lifetime_ctx = EscapeAnalyzer::new(
|
||||
&mut inferencer.unifier,
|
||||
&mut inferencer.primitives,
|
||||
inferencer.function_data.resolver.clone(),
|
||||
&inferencer.top_level,
|
||||
);
|
||||
lifetime_ctx.handle_statements(&statements).unwrap();
|
||||
lifetime_ctx.builder.remove_empty_bb();
|
||||
let result = lifetime_ctx.builder.analyze();
|
||||
assert_eq!(result, expected_result);
|
||||
|
||||
}
|
|
@ -83,7 +83,7 @@ where
|
|||
|
||||
pub fn impl_binop(
|
||||
unifier: &mut Unifier,
|
||||
_store: &PrimitiveStore,
|
||||
store: &PrimitiveStore,
|
||||
ty: Type,
|
||||
other_ty: &[Type],
|
||||
ret_ty: Type,
|
||||
|
@ -120,7 +120,7 @@ pub fn impl_binop(
|
|||
fields.insert(binop_assign_name(op).into(), {
|
||||
(
|
||||
unifier.add_ty(TypeEnum::TFunc(FunSignature {
|
||||
ret: ret_ty,
|
||||
ret: store.none,
|
||||
vars: function_vars.clone(),
|
||||
args: vec![FuncArg {
|
||||
ty: other_ty,
|
||||
|
|
|
@ -4,3 +4,4 @@ pub mod type_error;
|
|||
pub mod type_inferencer;
|
||||
pub mod typedef;
|
||||
mod unification_table;
|
||||
pub mod escape_analysis;
|
||||
|
|
|
@ -14,7 +14,7 @@ use nac3parser::ast::{
|
|||
};
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
pub(crate) mod test;
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
|
||||
pub struct CodeLocation {
|
||||
|
@ -423,7 +423,7 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
|
|||
(None, None) => {}
|
||||
},
|
||||
ast::StmtKind::AugAssign { target, op, value, .. } => {
|
||||
let res_ty = self.infer_bin_ops(stmt.location, target, op, value, true)?;
|
||||
let res_ty = self.infer_bin_ops(stmt.location, target, op, value)?;
|
||||
self.unify(res_ty, target.custom.unwrap(), &stmt.location)?;
|
||||
}
|
||||
ast::StmtKind::Assert { test, msg, .. } => {
|
||||
|
@ -505,7 +505,7 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
|
|||
}
|
||||
ast::ExprKind::BoolOp { values, .. } => Some(self.infer_bool_ops(values)?),
|
||||
ast::ExprKind::BinOp { left, op, right } => {
|
||||
Some(self.infer_bin_ops(expr.location, left, op, right, false)?)
|
||||
Some(self.infer_bin_ops(expr.location, left, op, right)?)
|
||||
}
|
||||
ast::ExprKind::UnaryOp { op, operand } => Some(self.infer_unary_ops(op, operand)?),
|
||||
ast::ExprKind::Compare { left, ops, comparators } => {
|
||||
|
@ -1028,24 +1028,8 @@ impl<'a> Inferencer<'a> {
|
|||
left: &ast::Expr<Option<Type>>,
|
||||
op: &ast::Operator,
|
||||
right: &ast::Expr<Option<Type>>,
|
||||
is_aug_assign: bool,
|
||||
) -> InferenceResult {
|
||||
let method = if let TypeEnum::TObj { fields, .. } =
|
||||
self.unifier.get_ty_immutable(left.custom.unwrap()).as_ref()
|
||||
{
|
||||
let (binop_name, binop_assign_name) = (
|
||||
binop_name(op).into(),
|
||||
binop_assign_name(op).into()
|
||||
);
|
||||
// if is aug_assign, try aug_assign operator first
|
||||
if is_aug_assign && fields.contains_key(&binop_assign_name) {
|
||||
binop_assign_name
|
||||
} else {
|
||||
binop_name
|
||||
}
|
||||
} else {
|
||||
binop_name(op).into()
|
||||
};
|
||||
let method = binop_name(op).into();
|
||||
self.build_method_call(
|
||||
location,
|
||||
method,
|
||||
|
|
|
@ -11,7 +11,7 @@ use nac3parser::parser::parse_program;
|
|||
use parking_lot::RwLock;
|
||||
use test_case::test_case;
|
||||
|
||||
struct Resolver {
|
||||
pub(crate) struct Resolver {
|
||||
id_to_type: HashMap<StrRef, Type>,
|
||||
id_to_def: HashMap<StrRef, DefinitionId>,
|
||||
class_names: HashMap<StrRef, Type>,
|
||||
|
@ -56,7 +56,7 @@ impl SymbolResolver for Resolver {
|
|||
}
|
||||
}
|
||||
|
||||
struct TestEnvironment {
|
||||
pub(crate) struct TestEnvironment {
|
||||
pub unifier: Unifier,
|
||||
pub function_data: FunctionData,
|
||||
pub primitives: PrimitiveStore,
|
||||
|
@ -192,7 +192,7 @@ impl TestEnvironment {
|
|||
}
|
||||
}
|
||||
|
||||
fn new() -> TestEnvironment {
|
||||
pub fn new() -> TestEnvironment {
|
||||
let mut unifier = Unifier::new();
|
||||
let mut identifier_mapping = HashMap::new();
|
||||
let mut top_level_defs: Vec<Arc<RwLock<TopLevelDef>>> = Vec::new();
|
||||
|
@ -447,7 +447,7 @@ impl TestEnvironment {
|
|||
}
|
||||
}
|
||||
|
||||
fn get_inferencer(&mut self) -> Inferencer {
|
||||
pub fn get_inferencer(&mut self) -> Inferencer {
|
||||
Inferencer {
|
||||
top_level: &self.top_level,
|
||||
function_data: &mut self.function_data,
|
||||
|
|
|
@ -16,7 +16,7 @@ use crate::toplevel::{DefinitionId, TopLevelContext, TopLevelDef};
|
|||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
/// Handle for a type, implemented as a key in the unification table.
|
||||
/// Handle for a type, implementated as a key in the unification table.
|
||||
pub type Type = UnificationKey;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
|
@ -657,16 +657,12 @@ impl Unifier {
|
|||
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
|
||||
}
|
||||
for (x, y) in ty1.iter().zip(ty2.iter()) {
|
||||
if self.unify_impl(*x, *y, false).is_err() {
|
||||
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
|
||||
}
|
||||
self.unify_impl(*x, *y, false)?;
|
||||
}
|
||||
self.set_a_to_b(a, b);
|
||||
}
|
||||
(TList { ty: ty1 }, TList { ty: ty2 }) => {
|
||||
if self.unify_impl(*ty1, *ty2, false).is_err() {
|
||||
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
|
||||
}
|
||||
self.unify_impl(*ty1, *ty2, false)?;
|
||||
self.set_a_to_b(a, b);
|
||||
}
|
||||
(TVar { fields: Some(map), range, .. }, TObj { fields, .. }) => {
|
||||
|
@ -747,16 +743,12 @@ impl Unifier {
|
|||
self.incompatible_types(a, b)?;
|
||||
}
|
||||
for (x, y) in zip(params1.values(), params2.values()) {
|
||||
if self.unify_impl(*x, *y, false).is_err() {
|
||||
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
|
||||
};
|
||||
self.unify_impl(*x, *y, false)?;
|
||||
}
|
||||
self.set_a_to_b(a, b);
|
||||
}
|
||||
(TVirtual { ty: ty1 }, TVirtual { ty: ty2 }) => {
|
||||
if self.unify_impl(*ty1, *ty2, false).is_err() {
|
||||
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
|
||||
};
|
||||
self.unify_impl(*ty1, *ty2, false)?;
|
||||
self.set_a_to_b(a, b);
|
||||
}
|
||||
(TCall(calls1), TCall(calls2)) => {
|
||||
|
@ -792,13 +784,9 @@ impl Unifier {
|
|||
if x.name != y.name || x.default_value != y.default_value {
|
||||
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
|
||||
}
|
||||
if self.unify_impl(x.ty, y.ty, false).is_err() {
|
||||
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
|
||||
};
|
||||
self.unify_impl(x.ty, y.ty, false)?;
|
||||
}
|
||||
if self.unify_impl(sign1.ret, sign2.ret, false).is_err() {
|
||||
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
|
||||
};
|
||||
self.unify_impl(sign1.ret, sign2.ret, false)?;
|
||||
self.set_a_to_b(a, b);
|
||||
}
|
||||
(TVar { fields: Some(fields), .. }, _) => {
|
||||
|
@ -842,7 +830,7 @@ impl Unifier {
|
|||
},
|
||||
)
|
||||
},
|
||||
&mut |id| format!("typevar{}", id),
|
||||
&mut |id| format!("var{}", id),
|
||||
notes,
|
||||
)
|
||||
}
|
||||
|
|
|
@ -286,7 +286,7 @@ fn test_unify(
|
|||
("v1", "tuple[int]"),
|
||||
("v2", "tuple[float]"),
|
||||
],
|
||||
(("v1", "v2"), "Incompatible types: tuple[0] and tuple[1]")
|
||||
(("v1", "v2"), "Incompatible types: 0 and 1")
|
||||
; "tuple parameter mismatch"
|
||||
)]
|
||||
#[test_case(2,
|
||||
|
@ -302,14 +302,14 @@ fn test_unify(
|
|||
("v1", "Record[a=float,b=int]"),
|
||||
("v2", "Foo[v3]"),
|
||||
],
|
||||
(("v1", "v2"), "`3[typevar4]::b` field/method does not exist")
|
||||
(("v1", "v2"), "`3[var4]::b` field/method does not exist")
|
||||
; "record obj merge"
|
||||
)]
|
||||
/// Test cases for invalid unifications.
|
||||
fn test_invalid_unification(
|
||||
variable_count: u32,
|
||||
unify_pairs: &[(&'static str, &'static str)],
|
||||
erroneous_pair: ((&'static str, &'static str), &'static str),
|
||||
errornous_pair: ((&'static str, &'static str), &'static str),
|
||||
) {
|
||||
let mut env = TestEnvironment::new();
|
||||
let mut mapping = HashMap::new();
|
||||
|
@ -326,11 +326,11 @@ fn test_invalid_unification(
|
|||
pairs.push((t1, t2));
|
||||
}
|
||||
let (t1, t2) =
|
||||
(env.parse(erroneous_pair.0 .0, &mapping), env.parse(erroneous_pair.0 .1, &mapping));
|
||||
(env.parse(errornous_pair.0 .0, &mapping), env.parse(errornous_pair.0 .1, &mapping));
|
||||
for (a, b) in pairs {
|
||||
env.unifier.unify(a, b).unwrap();
|
||||
}
|
||||
assert_eq!(env.unify(t1, t2), Err(erroneous_pair.1.to_string()));
|
||||
assert_eq!(env.unify(t1, t2), Err(errornous_pair.1.to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -445,7 +445,7 @@ fn test_typevar_range() {
|
|||
// where v in (int, list[v1]), v1 in (int, bool)
|
||||
assert_eq!(
|
||||
env.unify(float_list, v),
|
||||
Err("Expected any one of these types: 0, list[typevar5], but got list[1]\n\nNotes:\n typevar5 ∈ {0, 2}".to_string())
|
||||
Err("Expected any one of these types: 0, list[var5], but got list[1]\n\nNotes:\n var5 ∈ {0, 2}".to_string())
|
||||
);
|
||||
|
||||
let a = env.unifier.get_fresh_var_with_range(&[int, float], None, None).0;
|
||||
|
@ -478,8 +478,7 @@ fn test_typevar_range() {
|
|||
let int_list = env.unifier.add_ty(TypeEnum::TList { ty: int });
|
||||
assert_eq!(
|
||||
env.unify(a_list, int_list),
|
||||
Err("Incompatible types: list[typevar22] and list[0]\
|
||||
\n\nNotes:\n typevar22 ∈ {1}".into())
|
||||
Err("Expected any one of these types: 1, but got 0".into())
|
||||
);
|
||||
|
||||
let a = env.unifier.get_fresh_var_with_range(&[int, float], None, None).0;
|
||||
|
@ -505,9 +504,9 @@ fn test_rigid_var() {
|
|||
let int = env.parse("int", &HashMap::new());
|
||||
let list_int = env.parse("list[int]", &HashMap::new());
|
||||
|
||||
assert_eq!(env.unify(a, b), Err("Incompatible types: typevar3 and typevar2".to_string()));
|
||||
assert_eq!(env.unify(a, b), Err("Incompatible types: var3 and var2".to_string()));
|
||||
env.unifier.unify(list_a, list_x).unwrap();
|
||||
assert_eq!(env.unify(list_x, list_int), Err("Incompatible types: list[typevar2] and list[0]".to_string()));
|
||||
assert_eq!(env.unify(list_x, list_int), Err("Incompatible types: 0 and var2".to_string()));
|
||||
|
||||
env.unifier.replace_rigid_var(a, int);
|
||||
env.unifier.unify(list_x, list_int).unwrap();
|
||||
|
|
|
@ -3,7 +3,7 @@ use std::rc::Rc;
|
|||
use itertools::izip;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
|
||||
pub struct UnificationKey(usize);
|
||||
pub struct UnificationKey(pub(crate) usize);
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct UnificationTable<V> {
|
||||
|
@ -44,6 +44,12 @@ impl<V> UnificationTable<V> {
|
|||
UnificationTable { parents: Vec::new(), ranks: Vec::new(), values: Vec::new(), log: Vec::new(), generation: 0 }
|
||||
}
|
||||
|
||||
fn log_action(&mut self, action: Action<V>) {
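// Only record the action when the log is already in use; presumably an empty
// log means no snapshot is being tracked, so there is nothing to roll back
// and the action can be dropped.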
|
||||
if !self.log.is_empty() {
|
||||
self.log.push(action);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_key(&mut self, v: V) -> UnificationKey {
|
||||
let index = self.parents.len();
|
||||
self.parents.push(index);
|
||||
|
@ -61,10 +67,10 @@ impl<V> UnificationTable<V> {
|
|||
if self.ranks[a] < self.ranks[b] {
|
||||
std::mem::swap(&mut a, &mut b);
|
||||
}
|
||||
self.log.push(Action::Parent { key: b, original_parent: self.parents[b] });
|
||||
self.log_action(Action::Parent { key: b, original_parent: self.parents[b] });
|
||||
self.parents[b] = a;
|
||||
if self.ranks[a] == self.ranks[b] {
|
||||
self.log.push(Action::Rank { key: a, original_rank: self.ranks[a] });
|
||||
self.log_action(Action::Rank { key: a, original_rank: self.ranks[a] });
|
||||
self.ranks[a] += 1;
|
||||
}
|
||||
}
|
||||
|
@ -88,7 +94,7 @@ impl<V> UnificationTable<V> {
|
|||
pub fn set_value(&mut self, a: UnificationKey, v: V) {
|
||||
let index = self.find(a);
|
||||
let original_value = self.values[index].replace(v);
|
||||
self.log.push(Action::Value { key: index, original_value });
|
||||
self.log_action(Action::Value { key: index, original_value });
|
||||
}
|
||||
|
||||
pub fn unioned(&mut self, a: UnificationKey, b: UnificationKey) -> bool {
|
||||
|
@ -106,7 +112,7 @@ impl<V> UnificationTable<V> {
|
|||
// a = parent.parent
|
||||
let a = self.parents[parent];
|
||||
// root.parent = parent.parent
|
||||
self.log.push(Action::Parent { key: root, original_parent: self.parents[root] });
|
||||
self.log_action(Action::Parent { key: root, original_parent: self.parents[root] });
|
||||
self.parents[root] = a;
|
||||
root = parent;
|
||||
// parent = root.parent
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
[package]
|
||||
name = "nac3ld"
|
||||
version = "0.1.0"
|
||||
authors = ["M-Labs"]
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
byteorder = { version = "1.4", default-features = false }
|
|
@ -1,392 +0,0 @@
|
|||
#![allow(non_camel_case_types, non_upper_case_globals)]
|
||||
|
||||
use std::mem;
|
||||
|
||||
use byteorder::{ByteOrder, LittleEndian};
|
||||
|
||||
pub const DW_EH_PE_omit: u8 = 0xFF;
|
||||
pub const DW_EH_PE_absptr: u8 = 0x00;
|
||||
|
||||
pub const DW_EH_PE_uleb128: u8 = 0x01;
|
||||
pub const DW_EH_PE_udata2: u8 = 0x02;
|
||||
pub const DW_EH_PE_udata4: u8 = 0x03;
|
||||
pub const DW_EH_PE_udata8: u8 = 0x04;
|
||||
pub const DW_EH_PE_sleb128: u8 = 0x09;
|
||||
pub const DW_EH_PE_sdata2: u8 = 0x0A;
|
||||
pub const DW_EH_PE_sdata4: u8 = 0x0B;
|
||||
pub const DW_EH_PE_sdata8: u8 = 0x0C;
|
||||
|
||||
pub const DW_EH_PE_pcrel: u8 = 0x10;
|
||||
pub const DW_EH_PE_textrel: u8 = 0x20;
|
||||
pub const DW_EH_PE_datarel: u8 = 0x30;
|
||||
pub const DW_EH_PE_funcrel: u8 = 0x40;
|
||||
pub const DW_EH_PE_aligned: u8 = 0x50;
|
||||
|
||||
pub const DW_EH_PE_indirect: u8 = 0x80;
|
||||
|
||||
pub struct DwarfReader<'a> {
|
||||
pub slice: &'a [u8],
|
||||
pub virt_addr: u32,
|
||||
}
|
||||
|
||||
impl<'a> DwarfReader<'a> {
|
||||
pub fn new(slice: &[u8], virt_addr: u32) -> DwarfReader {
|
||||
DwarfReader { slice, virt_addr }
|
||||
}
|
||||
|
||||
pub fn offset(&mut self, offset: i32) {
|
||||
self.slice = &self.slice[offset as usize..];
|
||||
self.virt_addr = self.virt_addr.wrapping_add(offset as u32);
|
||||
}
|
||||
|
||||
// ULEB128 and SLEB128 encodings are defined in Section 7.6 - "Variable
|
||||
// Length Data".
|
||||
pub fn read_uleb128(&mut self) -> u64 {
|
||||
let mut shift: usize = 0;
|
||||
let mut result: u64 = 0;
|
||||
let mut byte: u8;
|
||||
loop {
|
||||
byte = self.read_u8();
|
||||
result |= ((byte & 0x7F) as u64) << shift;
|
||||
shift += 7;
|
||||
if byte & 0x80 == 0 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
pub fn read_sleb128(&mut self) -> i64 {
|
||||
let mut shift: u32 = 0;
|
||||
let mut result: u64 = 0;
|
||||
let mut byte: u8;
|
||||
loop {
|
||||
byte = self.read_u8();
|
||||
result |= ((byte & 0x7F) as u64) << shift;
|
||||
shift += 7;
|
||||
if byte & 0x80 == 0 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
// sign-extend
|
||||
if shift < u64::BITS && (byte & 0x40) != 0 {
|
||||
result |= (!0 as u64) << shift;
|
||||
}
|
||||
result as i64
|
||||
}
|
||||
|
||||
pub fn read_u8(&mut self) -> u8 {
|
||||
let val = self.slice[0];
|
||||
self.slice = &self.slice[1..];
|
||||
val
|
||||
}
|
||||
}
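A quick way to sanity-check the two variable-length decoders above is the DWARF specification's canonical LEB128 examples; the snippet below is an assumed usage sketch (not part of the original crate), with the virtual address set to 0 since it plays no role here.

```rust
// Canonical LEB128 test vectors: 624485 <-> E5 8E 26 (unsigned),
// -123456 <-> C0 BB 78 (signed). DwarfReader is the type defined above.
let uleb = [0xE5u8, 0x8E, 0x26];
let mut reader = DwarfReader::new(&uleb, 0);
assert_eq!(reader.read_uleb128(), 624_485);

let sleb = [0xC0u8, 0xBB, 0x78];
let mut reader = DwarfReader::new(&sleb, 0);
assert_eq!(reader.read_sleb128(), -123_456);
```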
|
||||
|
||||
macro_rules! impl_read_fn {
|
||||
( $($type: ty, $byteorder_fn: ident);* ) => {
|
||||
impl<'a> DwarfReader<'a> {
|
||||
$(
|
||||
pub fn $byteorder_fn(&mut self) -> $type {
|
||||
let val = LittleEndian::$byteorder_fn(self.slice);
|
||||
self.slice = &self.slice[mem::size_of::<$type>()..];
|
||||
val
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_read_fn!(
|
||||
u16, read_u16;
|
||||
u32, read_u32;
|
||||
u64, read_u64;
|
||||
i16, read_i16;
|
||||
i32, read_i32;
|
||||
i64, read_i64
|
||||
);
|
||||
|
||||
pub struct DwarfWriter<'a> {
|
||||
pub slice: &'a mut [u8],
|
||||
pub offset: usize,
|
||||
}
|
||||
|
||||
impl<'a> DwarfWriter<'a> {
|
||||
pub fn new(slice: &mut [u8]) -> DwarfWriter {
|
||||
DwarfWriter { slice, offset: 0 }
|
||||
}
|
||||
|
||||
pub fn write_u8(&mut self, data: u8) {
|
||||
self.slice[self.offset] = data;
|
||||
self.offset += 1;
|
||||
}
|
||||
|
||||
pub fn write_u32(&mut self, data: u32) {
|
||||
LittleEndian::write_u32(&mut self.slice[self.offset..], data);
|
||||
self.offset += 4;
|
||||
}
|
||||
}
|
||||
|
||||
fn read_encoded_pointer(reader: &mut DwarfReader, encoding: u8) -> Result<usize, ()> {
|
||||
if encoding == DW_EH_PE_omit {
|
||||
return Err(());
|
||||
}
|
||||
|
||||
// DW_EH_PE_aligned implies it's an absolute pointer value
|
||||
// However, we are linking a library for a 32-bit architecture,
|
||||
// so the size of the variable should be 4 bytes instead.
|
||||
if encoding == DW_EH_PE_aligned {
|
||||
let shifted_virt_addr = round_up(reader.virt_addr as usize, mem::size_of::<u32>())?;
|
||||
let addr_inc = shifted_virt_addr - reader.virt_addr as usize;
|
||||
|
||||
reader.slice = &reader.slice[addr_inc..];
|
||||
reader.virt_addr = shifted_virt_addr as u32;
|
||||
return Ok(reader.read_u32() as usize);
|
||||
}
|
||||
|
||||
match encoding & 0x0F {
|
||||
DW_EH_PE_absptr => Ok(reader.read_u32() as usize),
|
||||
DW_EH_PE_uleb128 => Ok(reader.read_uleb128() as usize),
|
||||
DW_EH_PE_udata2 => Ok(reader.read_u16() as usize),
|
||||
DW_EH_PE_udata4 => Ok(reader.read_u32() as usize),
|
||||
DW_EH_PE_udata8 => Ok(reader.read_u64() as usize),
|
||||
DW_EH_PE_sleb128 => Ok(reader.read_sleb128() as usize),
|
||||
DW_EH_PE_sdata2 => Ok(reader.read_i16() as usize),
|
||||
DW_EH_PE_sdata4 => Ok(reader.read_i32() as usize),
|
||||
DW_EH_PE_sdata8 => Ok(reader.read_i64() as usize),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_encoded_pointer_with_pc(
|
||||
reader: &mut DwarfReader,
|
||||
encoding: u8,
|
||||
) -> Result<usize, ()> {
|
||||
let entry_virt_addr = reader.virt_addr;
|
||||
let mut result = read_encoded_pointer(reader, encoding)?;
|
||||
|
||||
// DW_EH_PE_aligned implies it's an absolute pointer value
|
||||
if encoding == DW_EH_PE_aligned {
|
||||
return Ok(result);
|
||||
}
|
||||
|
||||
result = match encoding & 0x70 {
|
||||
DW_EH_PE_pcrel => result.wrapping_add(entry_virt_addr as usize),
|
||||
|
||||
// .eh_frame normally would not have these kinds of relocations
|
||||
// These would not be supported by the dedicated linker relocation schemes for RISC-V
|
||||
DW_EH_PE_textrel | DW_EH_PE_datarel | DW_EH_PE_funcrel | DW_EH_PE_aligned => {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
// Other values should be impossible
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
if encoding & DW_EH_PE_indirect != 0 {
|
||||
// There should not be a need for indirect addressing, as assembly code from
|
||||
// the dynamic library should not be freely moved relative to the EH frame.
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
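For reference, the encodings handled above decompose into two nibbles: the low nibble selects the storage format and the high nibble the base it is applied against. So the `0x1B` advertised later for `eh_frame_ptr` is `DW_EH_PE_pcrel | DW_EH_PE_sdata4`, and `0x3B` (`DW_EH_PE_datarel | DW_EH_PE_sdata4`) makes the table entries relative to the start of `.eh_frame_hdr`.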
|
||||
|
||||
#[inline]
|
||||
fn round_up(unrounded: usize, align: usize) -> Result<usize, ()> {
|
||||
if align.is_power_of_two() {
|
||||
Ok((unrounded + align - 1) & !(align - 1))
|
||||
} else {
|
||||
Err(())
|
||||
}
|
||||
}
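As a worked example of the mask trick above (an assumed check, not original test code): rounding 0x1001 up to a 4-byte boundary clears the low bits of 0x1001 + 3, while a non-power-of-two alignment is rejected outright.

```rust
assert_eq!(round_up(0x1001, 4), Ok(0x1004)); // (0x1001 + 3) & !3
assert_eq!(round_up(0x1000, 4), Ok(0x1000)); // already aligned
assert_eq!(round_up(7, 3), Err(()));         // 3 is not a power of two
```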
|
||||
|
||||
// Minimalistic structure to store everything needed for parsing FDEs to synthesize
|
||||
// the .eh_frame_hdr section. Since we are only linking 1 object file, there should only be 1 call
|
||||
// frame information (CFI) record, so there should be only 1 common information entry (CIE).
|
||||
// So the class parses the only CIE on init, caches the encoding info, then parses the FDEs on
|
||||
// iteration based on the cached encoding format.
|
||||
pub struct EH_Frame<'a> {
|
||||
// The pointer encoding from the augmentation data that corresponds to 'R' in the augmentation string
|
||||
pub fde_pointer_encoding: u8,
|
||||
pub fde_reader: DwarfReader<'a>,
|
||||
pub fde_sz: usize,
|
||||
}
|
||||
|
||||
impl<'a> EH_Frame<'a> {
|
||||
pub fn new(eh_frame_slice: &[u8], eh_frame_addr: u32) -> Result<EH_Frame, ()> {
|
||||
let mut cie_reader = DwarfReader::new(eh_frame_slice, eh_frame_addr);
|
||||
let eh_frame_size = eh_frame_slice.len();
|
||||
let length = cie_reader.read_u32();
|
||||
let fde_reader = match length {
|
||||
// An entry length of 0 in .eh_frame means the CIE is terminated,
|
||||
// while length == u32::MAX means that the length is only representable with 64 bits,
|
||||
// which does not make sense in a system with 32-bit addresses.
|
||||
0 | 0xFFFFFFFF => unimplemented!(),
|
||||
_ => {
|
||||
let mut fde_reader = DwarfReader::new(cie_reader.slice, cie_reader.virt_addr);
|
||||
fde_reader.offset(length as i32);
|
||||
fde_reader
|
||||
}
|
||||
};
|
||||
let fde_sz = eh_frame_size - mem::size_of::<u32>() - length as usize;
|
||||
|
||||
// Routine check on the .eh_frame well-formedness, in terms of the CIE ID & version fields.
|
||||
assert_eq!(cie_reader.read_u32(), 0);
|
||||
assert_eq!(cie_reader.read_u8(), 1);
|
||||
|
||||
// Parse augmentation string
|
||||
// The first character must be 'z', there is no way to proceed otherwise
|
||||
assert_eq!(cie_reader.read_u8(), b'z');
|
||||
|
||||
// Establish a pointer that skips ahead of the string
|
||||
// Skip code/data alignment factors & return address register along the way as well
|
||||
// We only tackle the case where 'z' and 'R' are part of the augmentation string, otherwise
|
||||
// we cannot get the addresses to make .eh_frame_hdr
|
||||
let mut aug_data_reader = DwarfReader::new(cie_reader.slice, cie_reader.virt_addr);
|
||||
let mut aug_str_len = 0;
|
||||
loop {
|
||||
if aug_data_reader.read_u8() == b'\0' {
|
||||
break;
|
||||
}
|
||||
aug_str_len += 1;
|
||||
}
|
||||
if aug_str_len == 0 {
|
||||
unimplemented!();
|
||||
}
|
||||
aug_data_reader.read_uleb128(); // Code alignment factor
|
||||
aug_data_reader.read_sleb128(); // Data alignment factor
|
||||
aug_data_reader.read_uleb128(); // Return address register
|
||||
aug_data_reader.read_uleb128(); // Augmentation data length
|
||||
let mut fde_pointer_encoding = DW_EH_PE_omit;
|
||||
for _ in 0..aug_str_len {
|
||||
match cie_reader.read_u8() {
|
||||
b'L' => {
|
||||
aug_data_reader.read_u8();
|
||||
}
|
||||
|
||||
b'P' => {
|
||||
let encoding = aug_data_reader.read_u8();
|
||||
read_encoded_pointer(&mut aug_data_reader, encoding)?;
|
||||
}
|
||||
|
||||
b'R' => {
|
||||
fde_pointer_encoding = aug_data_reader.read_u8();
|
||||
}
|
||||
|
||||
// Other characters are not supported
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
assert_ne!(fde_pointer_encoding, DW_EH_PE_omit);
|
||||
|
||||
Ok(EH_Frame { fde_pointer_encoding, fde_reader, fde_sz })
|
||||
}
|
||||
|
||||
pub fn iterate_fde(&self, callback: &mut dyn FnMut(u32, u32)) -> Result<(), ()> {
|
||||
// Parse each FDE to obtain the starting address that the FDE applies to
|
||||
// Send the FDE offset and the mentioned address to a callback that write up the
|
||||
// .eh_frame_hdr section
|
||||
let mut remaining_len = self.fde_sz;
|
||||
let mut reader = DwarfReader::new(self.fde_reader.slice, self.fde_reader.virt_addr);
|
||||
loop {
|
||||
if remaining_len == 0 {
|
||||
break;
|
||||
}
|
||||
|
||||
let fde_virt_addr = reader.virt_addr;
|
||||
let length = match reader.read_u32() {
|
||||
0 | 0xFFFFFFFF => unimplemented!(),
|
||||
other => other,
|
||||
};
|
||||
|
||||
// Remove the length of the header and the content from the counter
|
||||
remaining_len -= length as usize + mem::size_of::<u32>();
|
||||
let mut next_fde_reader = DwarfReader::new(reader.slice, reader.virt_addr);
|
||||
next_fde_reader.offset(length as i32);
|
||||
|
||||
// Skip CIE pointer offset
|
||||
reader.read_u32();
|
||||
|
||||
// Parse PC Begin using the encoding scheme mentioned in the CIE
|
||||
let pc_begin = read_encoded_pointer_with_pc(&mut reader, self.fde_pointer_encoding)?;
|
||||
|
||||
callback(pc_begin as u32, fde_virt_addr);
|
||||
|
||||
reader = next_fde_reader;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EH_Frame_Hdr<'a> {
|
||||
fde_writer: DwarfWriter<'a>,
|
||||
eh_frame_hdr_addr: u32,
|
||||
fdes: Vec<(u32, u32)>,
|
||||
}
|
||||
|
||||
impl<'a> EH_Frame_Hdr<'a> {
|
||||
// Create an EH_Frame_Hdr object, and write out the fixed fields of .eh_frame_hdr to memory
|
||||
// eh_frame_ptr_enc will be 0x1B (PC-relative, 4 bytes)
|
||||
// table_enc will be 0x3B (Relative to the start of .eh_frame_hdr, 4 bytes)
|
||||
// Load address is not known at this point.
|
||||
pub fn new(
|
||||
eh_frame_hdr_slice: &mut [u8],
|
||||
eh_frame_hdr_addr: u32,
|
||||
eh_frame_addr: u32,
|
||||
) -> EH_Frame_Hdr {
|
||||
let mut writer = DwarfWriter::new(eh_frame_hdr_slice);
|
||||
writer.write_u8(1);
|
||||
writer.write_u8(0x1B);
|
||||
writer.write_u8(0x03);
|
||||
writer.write_u8(0x3B);
|
||||
|
||||
let eh_frame_offset =
|
||||
(eh_frame_addr).wrapping_sub(eh_frame_hdr_addr + ((mem::size_of::<u8>() as u32) * 4));
|
||||
writer.write_u32(eh_frame_offset);
|
||||
writer.write_u32(0);
|
||||
|
||||
EH_Frame_Hdr { fde_writer: writer, eh_frame_hdr_addr, fdes: Vec::new() }
|
||||
}
|
||||
|
||||
fn fde_count_offset() -> usize {
|
||||
8
|
||||
}
|
||||
|
||||
pub fn add_fde(&mut self, init_loc: u32, addr: u32) {
|
||||
self.fdes.push((
|
||||
init_loc.wrapping_sub(self.eh_frame_hdr_addr),
|
||||
addr.wrapping_sub(self.eh_frame_hdr_addr),
|
||||
));
|
||||
}
|
||||
|
||||
pub fn finalize_fde(mut self) {
|
||||
self.fdes
|
||||
.sort_by(|(left_init_loc, _), (right_init_loc, _)| left_init_loc.cmp(right_init_loc));
|
||||
for (init_loc, addr) in &self.fdes {
|
||||
self.fde_writer.write_u32(*init_loc);
|
||||
self.fde_writer.write_u32(*addr);
|
||||
}
|
||||
LittleEndian::write_u32(&mut self.fde_writer.slice[Self::fde_count_offset()..], self.fdes.len() as u32);
|
||||
}
|
||||
|
||||
pub fn size_from_eh_frame(eh_frame: &[u8]) -> usize {
|
||||
// The virtual address of the EH frame does not matter in this case
|
||||
// Calculation of size does not involve modifying any headers
|
||||
let mut reader = DwarfReader::new(eh_frame, 0);
|
||||
let mut fde_count = 0;
|
||||
while !reader.slice.is_empty() {
|
||||
// The original length field should be able to hold the entire value.
|
||||
// The device memory space is limited to 32-bit addresses anyway.
|
||||
let entry_length = reader.read_u32();
|
||||
if entry_length == 0 || entry_length == 0xFFFFFFFF {
|
||||
unimplemented!()
|
||||
}
|
||||
if reader.read_u32() != 0 {
|
||||
fde_count += 1;
|
||||
}
|
||||
reader.offset(entry_length as i32 - mem::size_of::<u32>() as i32)
|
||||
}
|
||||
12 + fde_count * 8
|
||||
}
|
||||
}
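The `12 + fde_count * 8` figure matches the fixed fields written by `EH_Frame_Hdr::new` above: four one-byte fields (version, `eh_frame_ptr` encoding, `fde_count` encoding, table encoding), the 4-byte `eh_frame_ptr`, and the 4-byte FDE count, followed by one 8-byte binary-search-table entry (initial location plus FDE address) per FDE. A hedged sketch of how the pieces are meant to fit together follows; the wrapper function, its name, and its error handling are assumptions, not code from the original linker.

```rust
fn synthesize_eh_frame_hdr(
    eh_frame: &[u8],
    eh_frame_addr: u32,
    eh_frame_hdr: &mut [u8],
    eh_frame_hdr_addr: u32,
) -> Result<(), ()> {
    // The output buffer must cover the fixed header plus one entry per FDE.
    assert!(eh_frame_hdr.len() >= EH_Frame_Hdr::size_from_eh_frame(eh_frame));
    let mut hdr = EH_Frame_Hdr::new(eh_frame_hdr, eh_frame_hdr_addr, eh_frame_addr);
    let cfi = EH_Frame::new(eh_frame, eh_frame_addr)?;
    // Record (PC begin, FDE address) for every FDE in the binary search table.
    cfi.iterate_fde(&mut |pc_begin, fde_addr| hdr.add_fde(pc_begin, fde_addr))?;
    // Sort the table and patch the FDE count in the header.
    hdr.finalize_fde();
    Ok(())
}
```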
|
nac3ld/src/elf.rs (2893 lines): file diff suppressed because it is too large
nac3ld/src/lib.rs (1490 lines): file diff suppressed because it is too large
|
@ -8,17 +8,17 @@ license = "MIT"
|
|||
edition = "2018"
|
||||
|
||||
[build-dependencies]
|
||||
lalrpop = "0.19"
|
||||
lalrpop = "0.19.6"
|
||||
|
||||
[dependencies]
|
||||
nac3ast = { path = "../nac3ast" }
|
||||
lalrpop-util = "0.19"
|
||||
log = "0.4"
|
||||
lalrpop-util = "0.19.6"
|
||||
log = "0.4.1"
|
||||
unic-emoji-char = "0.9"
|
||||
unic-ucd-ident = "0.9"
|
||||
unicode_names2 = "0.5"
|
||||
phf = { version = "0.11", features = ["macros"] }
|
||||
ahash = "0.7"
|
||||
unicode_names2 = "0.4"
|
||||
phf = { version = "0.9", features = ["macros"] }
|
||||
ahash = "0.7.2"
|
||||
|
||||
[dev-dependencies]
|
||||
insta = "=1.11.0"
|
||||
|
|
|
@ -5,11 +5,11 @@ authors = ["M-Labs"]
|
|||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
parking_lot = "0.12"
|
||||
parking_lot = "0.11.1"
|
||||
nac3parser = { path = "../nac3parser" }
|
||||
nac3core = { path = "../nac3core" }
|
||||
|
||||
[dependencies.inkwell]
|
||||
git = "https://github.com/TheDan64/inkwell.git"
|
||||
version = "0.1.0-beta.4"
|
||||
default-features = false
|
||||
features = ["llvm14-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
|
||||
features = ["llvm13-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
|
||||
|
|
|
@ -1,32 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
@extern
|
||||
def output_int32(x: int32):
|
||||
...
|
||||
|
||||
class A:
|
||||
a: int32
|
||||
|
||||
def __init__(self, a: int32):
|
||||
self.a = a
|
||||
|
||||
def f1(self):
|
||||
self.f2()
|
||||
|
||||
def f2(self):
|
||||
output_int32(self.a)
|
||||
|
||||
class B(A):
|
||||
b: int32
|
||||
|
||||
def __init__(self, b: int32):
|
||||
self.a = b + 1
|
||||
self.b = b
|
||||
|
||||
|
||||
def run() -> int32:
|
||||
aaa = A(5)
|
||||
bbb = B(2)
|
||||
aaa.f1()
|
||||
bbb.f1()
|
||||
return 0
|
|
@ -229,7 +229,7 @@ def list_slice_assignment():
|
|||
bl5[3:-5] = []
|
||||
output_int32_list([int32(b) for b in bl5])
|
||||
bl6 = bl[:]
|
||||
bl6[3:-5] = [True, False]
|
||||
bl6[3:-5] = [True, False, False]
|
||||
output_int32_list([int32(b) for b in bl6])
|
||||
bl7 = bl[:]
|
||||
bl7[:-2] = [False]
|
||||
|
|
|
@ -1,257 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
@extern
|
||||
def output_int32(x: int32):
|
||||
...
|
||||
@extern
|
||||
def output_uint32(x: uint32):
|
||||
...
|
||||
@extern
|
||||
def output_int64(x: int64):
|
||||
...
|
||||
@extern
|
||||
def output_uint64(x: uint64):
|
||||
...
|
||||
@extern
|
||||
def output_float64(x: float):
|
||||
...
|
||||
|
||||
def run() -> int32:
|
||||
test_int32()
|
||||
test_uint32()
|
||||
test_int64()
|
||||
test_uint64()
|
||||
test_A()
|
||||
test_B()
|
||||
return 0
|
||||
|
||||
def test_int32():
|
||||
a = 17
|
||||
b = 3
|
||||
output_int32(a + b)
|
||||
output_int32(a - b)
|
||||
output_int32(a * b)
|
||||
output_int32(a // b)
|
||||
output_int32(a % b)
|
||||
output_int32(a | b)
|
||||
output_int32(a ^ b)
|
||||
output_int32(a & b)
|
||||
output_int32(a << b)
|
||||
output_int32(a >> b)
|
||||
output_float64(a / b)
|
||||
a += b
|
||||
output_int32(a)
|
||||
a -= b
|
||||
output_int32(a)
|
||||
a *= b
|
||||
output_int32(a)
|
||||
a //= b
|
||||
output_int32(a)
|
||||
a %= b
|
||||
output_int32(a)
|
||||
a |= b
|
||||
output_int32(a)
|
||||
a ^= b
|
||||
output_int32(a)
|
||||
a &= b
|
||||
output_int32(a)
|
||||
a <<= b
|
||||
output_int32(a)
|
||||
a >>= b
|
||||
output_int32(a)
|
||||
# fail because (a / b) is float
|
||||
# a /= b
|
||||
|
||||
def test_uint32():
|
||||
a = uint32(17)
|
||||
b = uint32(3)
|
||||
output_uint32(a + b)
|
||||
output_uint32(a - b)
|
||||
output_uint32(a * b)
|
||||
output_uint32(a // b)
|
||||
output_uint32(a % b)
|
||||
output_uint32(a | b)
|
||||
output_uint32(a ^ b)
|
||||
output_uint32(a & b)
|
||||
output_uint32(a << b)
|
||||
output_uint32(a >> b)
|
||||
output_float64(a / b)
|
||||
a += b
|
||||
output_uint32(a)
|
||||
a -= b
|
||||
output_uint32(a)
|
||||
a *= b
|
||||
output_uint32(a)
|
||||
a //= b
|
||||
output_uint32(a)
|
||||
a %= b
|
||||
output_uint32(a)
|
||||
a |= b
|
||||
output_uint32(a)
|
||||
a ^= b
|
||||
output_uint32(a)
|
||||
a &= b
|
||||
output_uint32(a)
|
||||
a <<= b
|
||||
output_uint32(a)
|
||||
a >>= b
|
||||
output_uint32(a)
|
||||
|
||||
def test_int64():
|
||||
a = int64(17)
|
||||
b = int64(3)
|
||||
output_int64(a + b)
|
||||
output_int64(a - b)
|
||||
output_int64(a * b)
|
||||
output_int64(a // b)
|
||||
output_int64(a % b)
|
||||
output_int64(a | b)
|
||||
output_int64(a ^ b)
|
||||
output_int64(a & b)
|
||||
output_int64(a << b)
|
||||
output_int64(a >> b)
|
||||
output_float64(a / b)
|
||||
a += b
|
||||
output_int64(a)
|
||||
a -= b
|
||||
output_int64(a)
|
||||
a *= b
|
||||
output_int64(a)
|
||||
a //= b
|
||||
output_int64(a)
|
||||
a %= b
|
||||
output_int64(a)
|
||||
a |= b
|
||||
output_int64(a)
|
||||
a ^= b
|
||||
output_int64(a)
|
||||
a &= b
|
||||
output_int64(a)
|
||||
a <<= b
|
||||
output_int64(a)
|
||||
a >>= b
|
||||
output_int64(a)
|
||||
|
||||
def test_uint64():
|
||||
a = uint64(17)
|
||||
b = uint64(3)
|
||||
output_uint64(a + b)
|
||||
output_uint64(a - b)
|
||||
output_uint64(a * b)
|
||||
output_uint64(a // b)
|
||||
output_uint64(a % b)
|
||||
output_uint64(a | b)
|
||||
output_uint64(a ^ b)
|
||||
output_uint64(a & b)
|
||||
output_uint64(a << b)
|
||||
output_uint64(a >> b)
|
||||
output_float64(a / b)
|
||||
a += b
|
||||
output_uint64(a)
|
||||
a -= b
|
||||
output_uint64(a)
|
||||
a *= b
|
||||
output_uint64(a)
|
||||
a //= b
|
||||
output_uint64(a)
|
||||
a %= b
|
||||
output_uint64(a)
|
||||
a |= b
|
||||
output_uint64(a)
|
||||
a ^= b
|
||||
output_uint64(a)
|
||||
a &= b
|
||||
output_uint64(a)
|
||||
a <<= b
|
||||
output_uint64(a)
|
||||
a >>= b
|
||||
output_uint64(a)
|
||||
|
||||
class A:
|
||||
a: int32
|
||||
def __init__(self, a: int32):
|
||||
self.a = a
|
||||
|
||||
def __add__(self, other: A) -> A:
|
||||
output_int32(self.a + other.a)
|
||||
return A(self.a + other.a)
|
||||
|
||||
def __sub__(self, other: A) -> A:
|
||||
output_int32(self.a - other.a)
|
||||
return A(self.a - other.a)
|
||||
|
||||
def test_A():
|
||||
a = A(17)
|
||||
b = A(3)
|
||||
|
||||
c = a + b
|
||||
# fail due to alloca in __add__ function
|
||||
# output_int32(c.a)
|
||||
|
||||
a += b
|
||||
# fail due to alloca in __add__ function
|
||||
# output_int32(a.a)
|
||||
|
||||
a = A(17)
|
||||
b = A(3)
|
||||
d = a - b
|
||||
# fail due to alloca in __add__ function
|
||||
# output_int32(c.a)
|
||||
|
||||
a -= b
|
||||
# fail due to alloca in __add__ function
|
||||
# output_int32(a.a)
|
||||
|
||||
a = A(17)
|
||||
b = A(3)
|
||||
a.__add__(b)
|
||||
a.__sub__(b)
|
||||
|
||||
|
||||
class B:
|
||||
a: int32
|
||||
def __init__(self, a: int32):
|
||||
self.a = a
|
||||
|
||||
def __add__(self, other: B) -> B:
|
||||
output_int32(self.a + other.a)
|
||||
return B(self.a + other.a)
|
||||
|
||||
def __sub__(self, other: B) -> B:
|
||||
output_int32(self.a - other.a)
|
||||
return B(self.a - other.a)
|
||||
|
||||
def __iadd__(self, other: B) -> B:
|
||||
output_int32(self.a + other.a + 24)
|
||||
return B(self.a + other.a + 24)
|
||||
|
||||
def __isub__(self, other: B) -> B:
|
||||
output_int32(self.a - other.a - 24)
|
||||
return B(self.a - other.a - 24)
|
||||
|
||||
def test_B():
|
||||
a = B(17)
|
||||
b = B(3)
|
||||
|
||||
c = a + b
|
||||
# fail due to alloca in __add__ function
|
||||
# output_int32(c.a)
|
||||
|
||||
a += b
|
||||
# fail due to alloca in __add__ function
|
||||
# output_int32(a.a)
|
||||
|
||||
a = B(17)
|
||||
b = B(3)
|
||||
d = a - b
|
||||
# fail due to alloca in __add__ function
|
||||
# output_int32(c.a)
|
||||
|
||||
a -= b
|
||||
# fail due to alloca in __add__ function
|
||||
# output_int32(a.a)
|
||||
|
||||
a = B(17)
|
||||
b = B(3)
|
||||
a.__add__(b)
|
||||
a.__sub__(b)
|
|
@ -1,26 +0,0 @@
|
|||
@extern
|
||||
def output_float64(f: float):
|
||||
...
|
||||
|
||||
|
||||
def run() -> int32:
|
||||
output_float64(float(3 ** 1))
|
||||
output_float64(float(3 ** 0))
|
||||
output_float64(float(3 ** 19))
|
||||
output_float64(1.0 ** -100)
|
||||
output_float64(1.0 ** -2)
|
||||
output_float64(1.0 ** 0)
|
||||
output_float64(1.0 ** 1)
|
||||
output_float64(1.0 ** 100)
|
||||
output_float64(3.0 ** 0)
|
||||
output_float64(3.0 ** 1)
|
||||
output_float64(3.0 ** 2)
|
||||
output_float64(3.0 ** -1)
|
||||
output_float64(3.0 ** -2)
|
||||
output_float64(3.0 ** -32767)
|
||||
output_float64(3.0 ** -3.0)
|
||||
output_float64(3.0 ** -0.0)
|
||||
output_float64(3.0 ** 0.0)
|
||||
output_float64(4.0 ** 0.5)
|
||||
output_float64(4.0 ** -0.5)
|
||||
return 0
|
|
@ -1,36 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
@extern
|
||||
def output_int32(a: int32):
|
||||
...
|
||||
|
||||
class A:
|
||||
d: int32
|
||||
a: list[B]
|
||||
def __init__(self, b: list[B]):
|
||||
self.d = 123
|
||||
self.a = b
|
||||
|
||||
def f(self):
|
||||
output_int32(self.d)
|
||||
|
||||
class B:
|
||||
a: A
|
||||
def __init__(self, a: A):
|
||||
self.a = a
|
||||
|
||||
def ff(self):
|
||||
self.a.f()
|
||||
|
||||
class Demo:
|
||||
a: A
|
||||
def __init__(self, a: A):
|
||||
self.a = a
|
||||
|
||||
def run() -> int32:
|
||||
aa = A([])
|
||||
bb = B(aa)
|
||||
aa.a = [bb]
|
||||
d = Demo(aa)
|
||||
d.a.a[0].ff()
|
||||
return 0
|
|
@ -34,9 +34,5 @@ def run() -> int32:
|
|||
insta = A()
|
||||
inst = C(insta)
|
||||
inst.foo()
|
||||
|
||||
insta2 = B()
|
||||
inst2 = C(insta2)
|
||||
inst2.foo()
|
||||
return 0
|
||||
|
||||
|
|
|
@ -30,130 +30,6 @@ use nac3parser::{
|
|||
mod basic_symbol_resolver;
|
||||
use basic_symbol_resolver::*;
|
||||
|
||||
fn handle_typevar_definition(
|
||||
var: &Expr,
|
||||
resolver: &(dyn SymbolResolver + Send + Sync),
|
||||
def_list: &[Arc<RwLock<TopLevelDef>>],
|
||||
unifier: &mut Unifier,
|
||||
primitives: &PrimitiveStore,
|
||||
) -> Result<Type, String> {
|
||||
if let ExprKind::Call { func, args, .. } = &var.node {
|
||||
if matches!(&func.node, ExprKind::Name { id, .. } if id == &"TypeVar".into()) {
|
||||
let constraints = args
|
||||
.iter()
|
||||
.skip(1)
|
||||
.map(|x| -> Result<Type, String> {
|
||||
let ty = parse_ast_to_type_annotation_kinds(
|
||||
resolver,
|
||||
def_list,
|
||||
unifier,
|
||||
primitives,
|
||||
x,
|
||||
Default::default(),
|
||||
)?;
|
||||
get_type_from_type_annotation_kinds(
|
||||
def_list, unifier, primitives, &ty, &mut None
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
Ok(unifier.get_fresh_var_with_range(&constraints, None, None).0)
|
||||
} else {
|
||||
Err(format!(
|
||||
"expression {:?} cannot be handled as a TypeVar in global scope",
|
||||
var
|
||||
))
|
||||
}
|
||||
} else {
|
||||
Err(format!(
|
||||
"expression {:?} cannot be handled as a TypeVar in global scope",
|
||||
var
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_assignment_pattern(
|
||||
targets: &[Expr],
|
||||
value: &Expr,
|
||||
resolver: &(dyn SymbolResolver + Send + Sync),
|
||||
internal_resolver: &ResolverInternal,
|
||||
def_list: &[Arc<RwLock<TopLevelDef>>],
|
||||
unifier: &mut Unifier,
|
||||
primitives: &PrimitiveStore,
|
||||
) -> Result<(), String> {
|
||||
if targets.len() == 1 {
|
||||
match &targets[0].node {
|
||||
ExprKind::Name { id, .. } => {
|
||||
if let Ok(var) = handle_typevar_definition(
|
||||
value.borrow(),
|
||||
resolver,
|
||||
def_list,
|
||||
unifier,
|
||||
primitives,
|
||||
) {
|
||||
internal_resolver.add_id_type(*id, var);
|
||||
Ok(())
|
||||
} else if let Ok(val) =
|
||||
parse_parameter_default_value(value.borrow(), resolver)
|
||||
{
|
||||
internal_resolver.add_module_global(*id, val);
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("fails to evaluate this expression `{:?}` as a constant or TypeVar at {}",
|
||||
targets[0].node,
|
||||
targets[0].location,
|
||||
))
|
||||
}
|
||||
}
|
||||
ExprKind::List { elts, .. } | ExprKind::Tuple { elts, .. } => {
|
||||
handle_assignment_pattern(
|
||||
elts,
|
||||
value,
|
||||
resolver,
|
||||
internal_resolver,
|
||||
def_list,
|
||||
unifier,
|
||||
primitives,
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(format!(
|
||||
"assignment to {:?} is not supported at {}",
|
||||
targets[0], targets[0].location
|
||||
)),
|
||||
}
|
||||
} else {
|
||||
match &value.node {
|
||||
ExprKind::List { elts, .. } | ExprKind::Tuple { elts, .. } => {
|
||||
if elts.len() != targets.len() {
|
||||
Err(format!(
|
||||
"number of elements to unpack does not match (expect {}, found {}) at {}",
|
||||
targets.len(),
|
||||
elts.len(),
|
||||
value.location
|
||||
))
|
||||
} else {
|
||||
for (tar, val) in targets.iter().zip(elts) {
|
||||
handle_assignment_pattern(
|
||||
std::slice::from_ref(tar),
|
||||
val,
|
||||
resolver,
|
||||
internal_resolver,
|
||||
def_list,
|
||||
unifier,
|
||||
primitives,
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
_ => Err(format!(
|
||||
"unpack of this expression is not supported at {}",
|
||||
value.location
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let file_name = env::args().nth(1).unwrap();
|
||||
let threads: u32 = env::args().nth(2).map(|s| str::parse(&s).unwrap()).unwrap_or(1);

@ -178,42 +54,163 @@ fn main() {
class_names: Default::default(),
module_globals: Default::default(),
str_store: Default::default(),
}.into();
}
.into();
let resolver =
Arc::new(Resolver(internal_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;

let parser_result = parser::parse_program(&program, file_name.into()).unwrap();

for stmt in parser_result.into_iter() {
match &stmt.node {
StmtKind::Assign { targets, value, .. } => {
let def_list = composer.extract_def_list();
let unifier = &mut composer.unifier;
let primitives = &composer.primitives_ty;
if let Err(err) = handle_assignment_pattern(
targets,
value,
resolver.as_ref(),
internal_resolver.as_ref(),
&def_list,
unifier,
primitives,
) {
eprintln!("{}", err);
return;
}
},
// allow (and ignore) "from __future__ import annotations"
StmtKind::ImportFrom { module, names, .. }
if module == &Some("__future__".into()) && names.len() == 1 && names[0].name == "annotations".into() => (),
_ => {
let (name, def_id, ty) =
composer.register_top_level(stmt, Some(resolver.clone()), "__main__".into(), true).unwrap();
internal_resolver.add_id_def(name, def_id);
if let Some(ty) = ty {
internal_resolver.add_id_type(name, ty);
if let StmtKind::Assign { targets, value, .. } = &stmt.node {
fn handle_typevar_definition(
var: &Expr,
resolver: &(dyn SymbolResolver + Send + Sync),
def_list: &[Arc<RwLock<TopLevelDef>>],
unifier: &mut Unifier,
primitives: &PrimitiveStore,
) -> Result<Type, String> {
if let ExprKind::Call { func, args, .. } = &var.node {
if matches!(&func.node, ExprKind::Name { id, .. } if id == &"TypeVar".into()) {
let constraints = args
.iter()
.skip(1)
.map(|x| -> Result<Type, String> {
let ty = parse_ast_to_type_annotation_kinds(
resolver,
def_list,
unifier,
primitives,
x,
Default::default(),
)?;
get_type_from_type_annotation_kinds(
def_list, unifier, primitives, &ty, &mut None
)
})
.collect::<Result<Vec<_>, _>>()?;
Ok(unifier.get_fresh_var_with_range(&constraints, None, None).0)
} else {
Err(format!(
"expression {:?} cannot be handled as a TypeVar in global scope",
var
))
}
} else {
Err(format!(
"expression {:?} cannot be handled as a TypeVar in global scope",
var
))
}
}

fn handle_assignment_pattern(
targets: &[Expr],
value: &Expr,
resolver: &(dyn SymbolResolver + Send + Sync),
internal_resolver: &ResolverInternal,
def_list: &[Arc<RwLock<TopLevelDef>>],
unifier: &mut Unifier,
primitives: &PrimitiveStore,
) -> Result<(), String> {
if targets.len() == 1 {
match &targets[0].node {
ExprKind::Name { id, .. } => {
if let Ok(var) = handle_typevar_definition(
value.borrow(),
resolver,
def_list,
unifier,
primitives,
) {
internal_resolver.add_id_type(*id, var);
Ok(())
} else if let Ok(val) =
parse_parameter_default_value(value.borrow(), resolver)
{
internal_resolver.add_module_global(*id, val);
Ok(())
} else {
Err(format!("fails to evaluate this expression `{:?}` as a constant or TypeVar at {}",
targets[0].node,
targets[0].location,
))
}
}
ExprKind::List { elts, .. } | ExprKind::Tuple { elts, .. } => {
handle_assignment_pattern(
elts,
value,
resolver,
internal_resolver,
def_list,
unifier,
primitives,
)?;
Ok(())
}
_ => Err(format!(
"assignment to {:?} is not supported at {}",
targets[0], targets[0].location
)),
}
} else {
match &value.node {
ExprKind::List { elts, .. } | ExprKind::Tuple { elts, .. } => {
if elts.len() != targets.len() {
Err(format!(
"number of elements to unpack does not match (expect {}, found {}) at {}",
targets.len(),
elts.len(),
value.location
))
} else {
for (tar, val) in targets.iter().zip(elts) {
handle_assignment_pattern(
std::slice::from_ref(tar),
val,
resolver,
internal_resolver,
def_list,
unifier,
primitives,
)?;
}
Ok(())
}
}
_ => Err(format!(
"unpack of this expression is not supported at {}",
value.location
)),
}
}
}

let def_list = composer.extract_def_list();
let unifier = &mut composer.unifier;
let primitives = &composer.primitives_ty;
if let Err(err) = handle_assignment_pattern(
targets,
value,
resolver.as_ref(),
internal_resolver.as_ref(),
&def_list,
unifier,
primitives,
) {
eprintln!("{}", err);
return;
}
continue;
}

let (name, def_id, ty) =
composer.register_top_level(stmt, Some(resolver.clone()), "__main__".into()).unwrap();

internal_resolver.add_id_def(name, def_id);
if let Some(ty) = ty {
internal_resolver.add_id_type(name, ty);
}
}

@ -223,7 +220,11 @@ fn main() {
let signature = store.from_signature(&mut composer.unifier, &primitive, &signature, &mut cache);
let signature = store.add_cty(signature);

composer.start_analysis(true).unwrap();
if let Err(e) = composer.start_analysis(true) {
eprintln!("{}", e);
std::process::exit(1);
}

let top_level = Arc::new(composer.make_top_level_context());
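The two helpers above cover the only global-scope assignment patterns nac3standalone accepts: a single name bound to a TypeVar or constant, and list/tuple targets unpacked element-wise. A minimal illustrative input (the constraint names are assumed to be the usual nac3 primitive types; this snippet is not part of the diff) would be:

    T = TypeVar("T", int32, float)   # registered via handle_typevar_definition
    x, y = 1, 2.0                    # recursed into by handle_assignment_pattern

Any other assignment target or value falls through to the "assignment to ... is not supported" / "unpack of this expression is not supported" errors shown above.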
@ -8,6 +8,7 @@
, ncurses
, zlib
, which
, llvmPackages_13
, debugVersion ? false
, enableManpages ? false
, enableSharedLibraries ? false

@ -17,7 +18,7 @@
let
inherit (lib) optional optionals optionalString;

release_version = "14.0.6";
release_version = "13.0.1";
candidate = ""; # empty or "rcN"
dash-candidate = lib.optionalString (candidate != "") "-${candidate}";
version = "${release_version}${dash-candidate}"; # differentiating these (variables) is important for RCs

@ -34,7 +35,7 @@ in stdenv.mkDerivation (rec {
pname = "llvm";
inherit version;

src = fetch pname "sha256-BQki7KrKV4H99mMeqSvHFRg/IC+dLxUUcibwI0FPYZo=";
src = fetch pname "sha256-7GuA2Cw4SsrS3BkpA6bPLNuv+4ibhL+5janXHmMPyDQ=";

unpackPhase = ''
unpackFile $src

@ -49,12 +50,19 @@ in stdenv.mkDerivation (rec {

buildInputs = [ ];

propagatedBuildInputs = [ ncurses zlib ];
propagatedBuildInputs = optionals (stdenv.buildPlatform == stdenv.hostPlatform) [ ncurses ]
++ [ zlib ];

checkInputs = [ which ];

patches = [
./gnu-install-dirs.patch
# Fix random compiler crashes: https://bugs.llvm.org/show_bug.cgi?id=50611
(fetchpatch {
url = "https://raw.githubusercontent.com/archlinux/svntogit-packages/4764a4f8c920912a2bfd8b0eea57273acfe0d8a8/trunk/no-strict-aliasing-DwarfCompileUnit.patch";
sha256 = "18l6mrvm2vmwm77ckcnbjvh6ybvn72rhrb799d4qzwac4x2ifl7g";
stripLen = 1;
})
./llvm-future-riscv-abi.diff
];

@ -113,8 +121,6 @@ in stdenv.mkDerivation (rec {
"-DLLVM_DEFAULT_TARGET_TRIPLE=${stdenv.hostPlatform.config}"
"-DLLVM_ENABLE_UNWIND_TABLES=OFF"
"-DLLVM_ENABLE_THREADS=OFF"
"-DLLVM_INCLUDE_BENCHMARKS=OFF"
"-DLLVM_BUILD_TOOLS=OFF"
"-DLLVM_TARGETS_TO_BUILD=X86;ARM;RISCV"
] ++ optionals enableSharedLibraries [
"-DLLVM_LINK_LLVM_DYLIB=ON"

@ -131,6 +137,7 @@ in stdenv.mkDerivation (rec {
"-DCAN_TARGET_i386=false"
] ++ optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
"-DCMAKE_CROSSCOMPILING=True"
"-DLLVM_TABLEGEN=${llvmPackages_13.tools.llvm}/bin/llvm-tblgen"
(
let
nativeCC = pkgsBuildBuild.targetPackages.stdenv.cc;

@ -147,7 +154,6 @@ in stdenv.mkDerivation (rec {
] ++ extraCmakeFlags;

postBuild = ''
make llvm-config
rm -fR $out
'';

@ -156,7 +162,6 @@ in stdenv.mkDerivation (rec {
'';

postInstall = ''
cp bin/llvm-config $out/bin
mkdir -p $python/share
mv $out/share/opt-viewer $python/share/opt-viewer
moveToOutput "bin/llvm-config*" "$dev"

@ -1,10 +1,23 @@
|
|||
diff --git a/CMakeLists.txt b/CMakeLists.txt
|
||||
index fec956091cd5..5a766f5c5d7c 100644
|
||||
index 135036f509d2..265c36f8211b 100644
|
||||
--- a/CMakeLists.txt
|
||||
+++ b/CMakeLists.txt
|
||||
@@ -303,6 +303,9 @@ set(LLVM_EXAMPLES_INSTALL_DIR "examples" CACHE STRING
|
||||
"Path for examples subdirectory (enabled by LLVM_BUILD_EXAMPLES=ON) (defaults to 'examples')")
|
||||
mark_as_advanced(LLVM_EXAMPLES_INSTALL_DIR)
|
||||
@@ -270,15 +270,21 @@ if (CMAKE_BUILD_TYPE AND
|
||||
message(FATAL_ERROR "Invalid value for CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
|
||||
endif()
|
||||
|
||||
+include(GNUInstallDirs)
|
||||
+
|
||||
set(LLVM_LIBDIR_SUFFIX "" CACHE STRING "Define suffix of library directory name (32/64)" )
|
||||
|
||||
-set(LLVM_TOOLS_INSTALL_DIR "bin" CACHE STRING "Path for binary subdirectory (defaults to 'bin')")
|
||||
+set(LLVM_TOOLS_INSTALL_DIR "${CMAKE_INSTALL_BINDIR}" CACHE STRING
|
||||
+ "Path for binary subdirectory (defaults to 'bin')")
|
||||
mark_as_advanced(LLVM_TOOLS_INSTALL_DIR)
|
||||
|
||||
set(LLVM_UTILS_INSTALL_DIR "${LLVM_TOOLS_INSTALL_DIR}" CACHE STRING
|
||||
"Path to install LLVM utilities (enabled by LLVM_INSTALL_UTILS=ON) (defaults to LLVM_TOOLS_INSTALL_DIR)")
|
||||
mark_as_advanced(LLVM_UTILS_INSTALL_DIR)
|
||||
|
||||
+set(LLVM_INSTALL_CMAKE_DIR "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}/cmake/llvm" CACHE STRING
|
||||
+ "Path for CMake subdirectory (defaults to lib/cmake/llvm)" )
|
||||
|
@ -12,22 +25,70 @@ index fec956091cd5..5a766f5c5d7c 100644
|
|||
# They are used as destination of target generators.
|
||||
set(LLVM_RUNTIME_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/bin)
|
||||
set(LLVM_LIBRARY_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/lib${LLVM_LIBDIR_SUFFIX})
|
||||
@@ -581,9 +587,9 @@ option (LLVM_ENABLE_SPHINX "Use Sphinx to generate llvm documentation." OFF)
|
||||
option (LLVM_ENABLE_OCAMLDOC "Build OCaml bindings documentation." ON)
|
||||
option (LLVM_ENABLE_BINDINGS "Build bindings." ON)
|
||||
|
||||
-set(LLVM_INSTALL_DOXYGEN_HTML_DIR "share/doc/llvm/doxygen-html"
|
||||
+set(LLVM_INSTALL_DOXYGEN_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/doxygen-html"
|
||||
CACHE STRING "Doxygen-generated HTML documentation install directory")
|
||||
-set(LLVM_INSTALL_OCAMLDOC_HTML_DIR "share/doc/llvm/ocaml-html"
|
||||
+set(LLVM_INSTALL_OCAMLDOC_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/ocaml-html"
|
||||
CACHE STRING "OCamldoc-generated HTML documentation install directory")
|
||||
|
||||
option (LLVM_BUILD_EXTERNAL_COMPILER_RT
|
||||
@@ -1048,7 +1054,7 @@ endif()
|
||||
|
||||
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
|
||||
install(DIRECTORY include/llvm include/llvm-c
|
||||
- DESTINATION include
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
|
||||
COMPONENT llvm-headers
|
||||
FILES_MATCHING
|
||||
PATTERN "*.def"
|
||||
@@ -1059,7 +1065,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
|
||||
)
|
||||
|
||||
install(DIRECTORY ${LLVM_INCLUDE_DIR}/llvm ${LLVM_INCLUDE_DIR}/llvm-c
|
||||
- DESTINATION include
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
|
||||
COMPONENT llvm-headers
|
||||
FILES_MATCHING
|
||||
PATTERN "*.def"
|
||||
@@ -1073,13 +1079,13 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
|
||||
|
||||
if (LLVM_INSTALL_MODULEMAPS)
|
||||
install(DIRECTORY include/llvm include/llvm-c
|
||||
- DESTINATION include
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
|
||||
COMPONENT llvm-headers
|
||||
FILES_MATCHING
|
||||
PATTERN "module.modulemap"
|
||||
)
|
||||
install(FILES include/llvm/module.install.modulemap
|
||||
- DESTINATION include/llvm
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm
|
||||
COMPONENT llvm-headers
|
||||
RENAME "module.extern.modulemap"
|
||||
)
|
||||
diff --git a/cmake/modules/AddLLVM.cmake b/cmake/modules/AddLLVM.cmake
|
||||
index fed1fec7d72e..4baed19b9e98 100644
|
||||
index 9c2b85374307..5531ceeb2eeb 100644
|
||||
--- a/cmake/modules/AddLLVM.cmake
|
||||
+++ b/cmake/modules/AddLLVM.cmake
|
||||
@@ -838,8 +838,8 @@ macro(add_llvm_library name)
|
||||
@@ -818,9 +818,9 @@ macro(add_llvm_library name)
|
||||
get_target_export_arg(${name} LLVM export_to_llvmexports ${umbrella})
|
||||
install(TARGETS ${name}
|
||||
${export_to_llvmexports}
|
||||
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
|
||||
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
|
||||
+ LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" COMPONENT ${name}
|
||||
+ ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" COMPONENT ${name}
|
||||
RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" COMPONENT ${name})
|
||||
- RUNTIME DESTINATION bin COMPONENT ${name})
|
||||
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
|
||||
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
|
||||
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT ${name})
|
||||
|
||||
if (NOT LLVM_ENABLE_IDE)
|
||||
@@ -1056,7 +1056,7 @@ function(process_llvm_pass_plugins)
|
||||
add_llvm_install_targets(install-${name}
|
||||
@@ -1036,7 +1036,7 @@ function(process_llvm_pass_plugins)
|
||||
"set(LLVM_STATIC_EXTENSIONS ${LLVM_STATIC_EXTENSIONS})")
|
||||
install(FILES
|
||||
${llvm_cmake_builddir}/LLVMConfigExtensions.cmake
|
||||
|
@ -36,7 +97,16 @@ index fed1fec7d72e..4baed19b9e98 100644
|
|||
COMPONENT cmake-exports)
|
||||
|
||||
set(ExtensionDef "${LLVM_BINARY_DIR}/include/llvm/Support/Extension.def")
|
||||
@@ -1902,7 +1902,7 @@ function(llvm_install_library_symlink name dest type)
|
||||
@@ -1250,7 +1250,7 @@ macro(add_llvm_example name)
|
||||
endif()
|
||||
add_llvm_executable(${name} ${ARGN})
|
||||
if( LLVM_BUILD_EXAMPLES )
|
||||
- install(TARGETS ${name} RUNTIME DESTINATION examples)
|
||||
+ install(TARGETS ${name} RUNTIME DESTINATION ${CMAKE_INSTALL_DOCDIR}/examples)
|
||||
endif()
|
||||
set_target_properties(${name} PROPERTIES FOLDER "Examples")
|
||||
endmacro(add_llvm_example name)
|
||||
@@ -1868,7 +1868,7 @@ function(llvm_install_library_symlink name dest type)
|
||||
set(full_name ${CMAKE_${type}_LIBRARY_PREFIX}${name}${CMAKE_${type}_LIBRARY_SUFFIX})
|
||||
set(full_dest ${CMAKE_${type}_LIBRARY_PREFIX}${dest}${CMAKE_${type}_LIBRARY_SUFFIX})
|
||||
|
||||
|
@ -45,7 +115,7 @@ index fed1fec7d72e..4baed19b9e98 100644
|
|||
if(WIN32 AND "${type}" STREQUAL "SHARED")
|
||||
set(output_dir bin)
|
||||
endif()
|
||||
@@ -1913,7 +1913,7 @@ function(llvm_install_library_symlink name dest type)
|
||||
@@ -1879,7 +1879,7 @@ function(llvm_install_library_symlink name dest type)
|
||||
|
||||
endfunction()
|
||||
|
||||
|
@ -54,7 +124,7 @@ index fed1fec7d72e..4baed19b9e98 100644
|
|||
cmake_parse_arguments(ARG "ALWAYS_GENERATE" "COMPONENT" "" ${ARGN})
|
||||
foreach(path ${CMAKE_MODULE_PATH})
|
||||
if(EXISTS ${path}/LLVMInstallSymlink.cmake)
|
||||
@@ -1936,7 +1936,7 @@ function(llvm_install_symlink name dest)
|
||||
@@ -1902,7 +1902,7 @@ function(llvm_install_symlink name dest)
|
||||
set(full_dest ${dest}${CMAKE_EXECUTABLE_SUFFIX})
|
||||
|
||||
install(SCRIPT ${INSTALL_SYMLINK}
|
||||
|
@ -63,7 +133,7 @@ index fed1fec7d72e..4baed19b9e98 100644
|
|||
COMPONENT ${component})
|
||||
|
||||
if (NOT LLVM_ENABLE_IDE AND NOT ARG_ALWAYS_GENERATE)
|
||||
@@ -2019,7 +2019,8 @@ function(add_llvm_tool_symlink link_name target)
|
||||
@@ -1985,7 +1985,8 @@ function(add_llvm_tool_symlink link_name target)
|
||||
endif()
|
||||
|
||||
if ((TOOL_IS_TOOLCHAIN OR NOT LLVM_INSTALL_TOOLCHAIN_ONLY) AND LLVM_BUILD_TOOLS)
|
||||
|
@ -73,7 +143,7 @@ index fed1fec7d72e..4baed19b9e98 100644
|
|||
endif()
|
||||
endif()
|
||||
endfunction()
|
||||
@@ -2148,9 +2149,9 @@ function(llvm_setup_rpath name)
|
||||
@@ -2114,9 +2115,9 @@ function(llvm_setup_rpath name)
|
||||
# Since BUILD_SHARED_LIBS is only recommended for use by developers,
|
||||
# hardcode the rpath to build/install lib dir first in this mode.
|
||||
# FIXME: update this when there is better solution.
|
||||
|
@ -86,10 +156,10 @@ index fed1fec7d72e..4baed19b9e98 100644
|
|||
set_property(TARGET ${name} APPEND_STRING PROPERTY
|
||||
LINK_FLAGS " -Wl,-z,origin ")
|
||||
diff --git a/cmake/modules/AddOCaml.cmake b/cmake/modules/AddOCaml.cmake
|
||||
index 891c9e6d618c..8d963f3b0069 100644
|
||||
index 554046b20edf..4d1ad980641e 100644
|
||||
--- a/cmake/modules/AddOCaml.cmake
|
||||
+++ b/cmake/modules/AddOCaml.cmake
|
||||
@@ -147,9 +147,9 @@ function(add_ocaml_library name)
|
||||
@@ -144,9 +144,9 @@ function(add_ocaml_library name)
|
||||
endforeach()
|
||||
|
||||
if( APPLE )
|
||||
|
@ -101,55 +171,86 @@ index 891c9e6d618c..8d963f3b0069 100644
|
|||
endif()
|
||||
list(APPEND ocaml_flags "-ldopt" "-Wl,-rpath,${ocaml_rpath}")
|
||||
|
||||
diff --git a/cmake/modules/AddSphinxTarget.cmake b/cmake/modules/AddSphinxTarget.cmake
|
||||
index e80c3b5c1cac..482f6d715ef5 100644
|
||||
--- a/cmake/modules/AddSphinxTarget.cmake
|
||||
+++ b/cmake/modules/AddSphinxTarget.cmake
|
||||
@@ -90,7 +90,7 @@ function (add_sphinx_target builder project)
|
||||
endif()
|
||||
elseif (builder STREQUAL html)
|
||||
string(TOUPPER "${project}" project_upper)
|
||||
- set(${project_upper}_INSTALL_SPHINX_HTML_DIR "share/doc/${project}/html"
|
||||
+ set(${project_upper}_INSTALL_SPHINX_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/html"
|
||||
CACHE STRING "HTML documentation install directory for ${project}")
|
||||
|
||||
# '/.' indicates: copy the contents of the directory directly into
|
||||
diff --git a/cmake/modules/CMakeLists.txt b/cmake/modules/CMakeLists.txt
|
||||
index cea0c1df0a14..eedcd9450312 100644
|
||||
index 51b6a4fdc284..4adc2acfc074 100644
|
||||
--- a/cmake/modules/CMakeLists.txt
|
||||
+++ b/cmake/modules/CMakeLists.txt
|
||||
@@ -2,7 +2,7 @@ include(ExtendPath)
|
||||
@@ -1,6 +1,6 @@
|
||||
include(LLVMDistributionSupport)
|
||||
include(FindPrefixFromConfig)
|
||||
|
||||
-set(LLVM_INSTALL_PACKAGE_DIR lib${LLVM_LIBDIR_SUFFIX}/cmake/llvm)
|
||||
+set(LLVM_INSTALL_PACKAGE_DIR ${LLVM_INSTALL_CMAKE_DIR} CACHE STRING "Path for CMake subdirectory (defaults to 'cmake/llvm')")
|
||||
set(llvm_cmake_builddir "${LLVM_BINARY_DIR}/${LLVM_INSTALL_PACKAGE_DIR}")
|
||||
|
||||
# First for users who use an installed LLVM, create the LLVMExports.cmake file.
|
||||
@@ -122,7 +122,7 @@ set(LLVM_CONFIG_INCLUDE_DIRS
|
||||
)
|
||||
list(REMOVE_DUPLICATES LLVM_CONFIG_INCLUDE_DIRS)
|
||||
@@ -109,13 +109,13 @@ foreach(p ${_count})
|
||||
set(LLVM_CONFIG_CODE "${LLVM_CONFIG_CODE}
|
||||
get_filename_component(LLVM_INSTALL_PREFIX \"\${LLVM_INSTALL_PREFIX}\" PATH)")
|
||||
endforeach(p)
|
||||
-set(LLVM_CONFIG_INCLUDE_DIRS "\${LLVM_INSTALL_PREFIX}/include")
|
||||
+set(LLVM_CONFIG_INCLUDE_DIRS "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}")
|
||||
set(LLVM_CONFIG_INCLUDE_DIR "${LLVM_CONFIG_INCLUDE_DIRS}")
|
||||
set(LLVM_CONFIG_MAIN_INCLUDE_DIR "${LLVM_CONFIG_INCLUDE_DIRS}")
|
||||
-set(LLVM_CONFIG_LIBRARY_DIRS "\${LLVM_INSTALL_PREFIX}/lib\${LLVM_LIBDIR_SUFFIX}")
|
||||
+set(LLVM_CONFIG_LIBRARY_DIRS "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}\${LLVM_LIBDIR_SUFFIX}")
|
||||
set(LLVM_CONFIG_CMAKE_DIR "\${LLVM_INSTALL_PREFIX}/${LLVM_INSTALL_PACKAGE_DIR}")
|
||||
set(LLVM_CONFIG_BINARY_DIR "\${LLVM_INSTALL_PREFIX}")
|
||||
-set(LLVM_CONFIG_TOOLS_BINARY_DIR "\${LLVM_INSTALL_PREFIX}/bin")
|
||||
+set(LLVM_CONFIG_TOOLS_BINARY_DIR "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}")
|
||||
|
||||
-extend_path(LLVM_CONFIG_LIBRARY_DIR "\${LLVM_INSTALL_PREFIX}" "lib\${LLVM_LIBDIR_SUFFIX}")
|
||||
+extend_path(LLVM_CONFIG_LIBRARY_DIR "\${LLVM_INSTALL_PREFIX}" "${CMAKE_INSTALL_LIBDIR}\${LLVM_LIBDIR_SUFFIX}")
|
||||
set(LLVM_CONFIG_LIBRARY_DIRS
|
||||
"${LLVM_CONFIG_LIBRARY_DIR}"
|
||||
# FIXME: Should there be other entries here?
|
||||
# Generate a default location for lit
|
||||
if (LLVM_INSTALL_UTILS AND LLVM_BUILD_UTILS)
|
||||
diff --git a/cmake/modules/LLVMInstallSymlink.cmake b/cmake/modules/LLVMInstallSymlink.cmake
|
||||
index b5c35f706cb7..9261ab797de6 100644
|
||||
index 3e6a2c9a2648..52e14d955c60 100644
|
||||
--- a/cmake/modules/LLVMInstallSymlink.cmake
|
||||
+++ b/cmake/modules/LLVMInstallSymlink.cmake
|
||||
@@ -6,7 +6,7 @@ include(GNUInstallDirs)
|
||||
@@ -4,7 +4,7 @@
|
||||
|
||||
function(install_symlink name target outdir)
|
||||
set(DESTDIR $ENV{DESTDIR})
|
||||
- set(bindir "${DESTDIR}${CMAKE_INSTALL_PREFIX}/${outdir}")
|
||||
- set(bindir "${DESTDIR}${CMAKE_INSTALL_PREFIX}/${outdir}/")
|
||||
+ set(bindir "${DESTDIR}${outdir}/")
|
||||
|
||||
message(STATUS "Creating ${name}")
|
||||
|
||||
diff --git a/docs/CMake.rst b/docs/CMake.rst
|
||||
index 044ec8a4d39d..504d0eac3ade 100644
|
||||
index f1ac2c7d4934..c6e1469b5e13 100644
|
||||
--- a/docs/CMake.rst
|
||||
+++ b/docs/CMake.rst
|
||||
@@ -224,7 +224,7 @@ description is in `LLVM-related variables`_ below.
|
||||
@@ -202,7 +202,7 @@ CMake manual, or execute ``cmake --help-variable VARIABLE_NAME``.
|
||||
**LLVM_LIBDIR_SUFFIX**:STRING
|
||||
Extra suffix to append to the directory where libraries are to be
|
||||
installed. On a 64-bit architecture, one could use ``-DLLVM_LIBDIR_SUFFIX=64``
|
||||
- to install libraries to ``/usr/lib64``.
|
||||
+ to install libraries to ``/usr/lib64``. See also ``CMAKE_INSTALL_LIBDIR``.
|
||||
|
||||
**LLVM_PARALLEL_{COMPILE,LINK}_JOBS**:STRING
|
||||
Building the llvm toolchain can use a lot of resources, particularly
|
||||
@@ -910,9 +910,11 @@ the ``cmake`` command or by setting it directly in ``ccmake`` or ``cmake-gui``).
|
||||
Rarely-used CMake variables
|
||||
---------------------------
|
||||
@@ -551,8 +551,8 @@ LLVM-specific variables
|
||||
|
||||
**LLVM_INSTALL_DOXYGEN_HTML_DIR**:STRING
|
||||
The path to install Doxygen-generated HTML documentation to. This path can
|
||||
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
|
||||
- `share/doc/llvm/doxygen-html`.
|
||||
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
|
||||
+ `${CMAKE_INSTALL_DOCDIR}/${project}/doxygen-html`.
|
||||
|
||||
**LLVM_LINK_LLVM_DYLIB**:BOOL
|
||||
If enabled, tools will be linked with the libLLVM shared library. Defaults
|
||||
@@ -792,9 +792,11 @@ the ``cmake`` command or by setting it directly in ``ccmake`` or ``cmake-gui``).
|
||||
|
||||
This file is available in two different locations.
|
||||
|
||||
|
@ -164,6 +265,18 @@ index 044ec8a4d39d..504d0eac3ade 100644
|
|||
|
||||
* ``<LLVM_BUILD_ROOT>/lib/cmake/llvm/LLVMConfig.cmake`` where
|
||||
``<LLVM_BUILD_ROOT>`` is the root of the LLVM build tree. **Note: this is only
|
||||
diff --git a/examples/Bye/CMakeLists.txt b/examples/Bye/CMakeLists.txt
|
||||
index bb96edb4b4bf..678c22fb43c8 100644
|
||||
--- a/examples/Bye/CMakeLists.txt
|
||||
+++ b/examples/Bye/CMakeLists.txt
|
||||
@@ -14,6 +14,6 @@ if (NOT WIN32)
|
||||
BUILDTREE_ONLY
|
||||
)
|
||||
|
||||
- install(TARGETS ${name} RUNTIME DESTINATION examples)
|
||||
+ install(TARGETS ${name} RUNTIME DESTINATION ${CMAKE_INSTALL_DOCDIR}/examples)
|
||||
set_target_properties(${name} PROPERTIES FOLDER "Examples")
|
||||
endif()
|
||||
diff --git a/include/llvm/CMakeLists.txt b/include/llvm/CMakeLists.txt
|
||||
index b46319f24fc8..2feabd1954e4 100644
|
||||
--- a/include/llvm/CMakeLists.txt
|
||||
|
@ -176,45 +289,93 @@ index b46319f24fc8..2feabd1954e4 100644
|
|||
+ configure_file(module.modulemap.build ${LLVM_INCLUDE_DIR}/module.modulemap COPYONLY)
|
||||
endif (NOT "${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
|
||||
diff --git a/tools/llvm-config/BuildVariables.inc.in b/tools/llvm-config/BuildVariables.inc.in
|
||||
index abbb8a450da6..70c497be12f5 100644
|
||||
index ebe5b73a5c65..70c497be12f5 100644
|
||||
--- a/tools/llvm-config/BuildVariables.inc.in
|
||||
+++ b/tools/llvm-config/BuildVariables.inc.in
|
||||
@@ -23,7 +23,10 @@
|
||||
@@ -23,6 +23,10 @@
|
||||
#define LLVM_CXXFLAGS "@LLVM_CXXFLAGS@"
|
||||
#define LLVM_BUILDMODE "@LLVM_BUILDMODE@"
|
||||
#define LLVM_LIBDIR_SUFFIX "@LLVM_LIBDIR_SUFFIX@"
|
||||
+#define LLVM_INSTALL_BINDIR "@CMAKE_INSTALL_BINDIR@"
|
||||
+#define LLVM_INSTALL_LIBDIR "@CMAKE_INSTALL_LIBDIR@"
|
||||
#define LLVM_INSTALL_INCLUDEDIR "@CMAKE_INSTALL_INCLUDEDIR@"
|
||||
+#define LLVM_INSTALL_INCLUDEDIR "@CMAKE_INSTALL_INCLUDEDIR@"
|
||||
+#define LLVM_INSTALL_CMAKEDIR "@LLVM_INSTALL_CMAKE_DIR@"
|
||||
#define LLVM_TARGETS_BUILT "@LLVM_TARGETS_BUILT@"
|
||||
#define LLVM_SYSTEM_LIBS "@LLVM_SYSTEM_LIBS@"
|
||||
#define LLVM_BUILD_SYSTEM "@LLVM_BUILD_SYSTEM@"
|
||||
diff --git a/tools/llvm-config/llvm-config.cpp b/tools/llvm-config/llvm-config.cpp
|
||||
index 8ed88f33ead4..5e7184bab90d 100644
|
||||
index 1a2f04552d13..44fa7d3eec6b 100644
|
||||
--- a/tools/llvm-config/llvm-config.cpp
|
||||
+++ b/tools/llvm-config/llvm-config.cpp
|
||||
@@ -363,12 +363,20 @@ int main(int argc, char **argv) {
|
||||
ActiveIncludeDir = std::string(Path.str());
|
||||
}
|
||||
{
|
||||
- SmallString<256> Path(LLVM_TOOLS_INSTALL_DIR);
|
||||
+ SmallString<256> Path(LLVM_INSTALL_BINDIR);
|
||||
sys::fs::make_absolute(ActivePrefix, Path);
|
||||
ActiveBinDir = std::string(Path.str());
|
||||
}
|
||||
@@ -357,12 +357,26 @@ int main(int argc, char **argv) {
|
||||
("-I" + ActiveIncludeDir + " " + "-I" + ActiveObjRoot + "/include");
|
||||
} else {
|
||||
ActivePrefix = CurrentExecPrefix;
|
||||
- ActiveIncludeDir = ActivePrefix + "/include";
|
||||
- SmallString<256> path(StringRef(LLVM_TOOLS_INSTALL_DIR));
|
||||
- sys::fs::make_absolute(ActivePrefix, path);
|
||||
- ActiveBinDir = std::string(path.str());
|
||||
- ActiveLibDir = ActivePrefix + "/lib" + LLVM_LIBDIR_SUFFIX;
|
||||
- ActiveCMakeDir = ActiveLibDir + "/cmake/llvm";
|
||||
+ {
|
||||
+ SmallString<256> Path(LLVM_INSTALL_LIBDIR LLVM_LIBDIR_SUFFIX);
|
||||
+ sys::fs::make_absolute(ActivePrefix, Path);
|
||||
+ ActiveLibDir = std::string(Path.str());
|
||||
+ SmallString<256> path(StringRef(LLVM_INSTALL_INCLUDEDIR));
|
||||
+ sys::fs::make_absolute(ActivePrefix, path);
|
||||
+ ActiveIncludeDir = std::string(path.str());
|
||||
+ }
|
||||
+ {
|
||||
+ SmallString<256> Path(LLVM_INSTALL_CMAKEDIR);
|
||||
+ sys::fs::make_absolute(ActivePrefix, Path);
|
||||
+ ActiveCMakeDir = std::string(Path.str());
|
||||
+ SmallString<256> path(StringRef(LLVM_INSTALL_BINDIR));
|
||||
+ sys::fs::make_absolute(ActivePrefix, path);
|
||||
+ ActiveBinDir = std::string(path.str());
|
||||
+ }
|
||||
+ {
|
||||
+ SmallString<256> path(StringRef(LLVM_INSTALL_LIBDIR LLVM_LIBDIR_SUFFIX));
|
||||
+ sys::fs::make_absolute(ActivePrefix, path);
|
||||
+ ActiveLibDir = std::string(path.str());
|
||||
+ }
|
||||
+ {
|
||||
+ SmallString<256> path(StringRef(LLVM_INSTALL_CMAKEDIR));
|
||||
+ sys::fs::make_absolute(ActivePrefix, path);
|
||||
+ ActiveCMakeDir = std::string(path.str());
|
||||
+ }
|
||||
ActiveIncludeOption = "-I" + ActiveIncludeDir;
|
||||
}
|
||||
|
||||
diff --git a/tools/lto/CMakeLists.txt b/tools/lto/CMakeLists.txt
|
||||
index 0af29ad762c5..37b99b83e35c 100644
|
||||
--- a/tools/lto/CMakeLists.txt
|
||||
+++ b/tools/lto/CMakeLists.txt
|
||||
@@ -33,7 +33,7 @@ add_llvm_library(${LTO_LIBRARY_NAME} ${LTO_LIBRARY_TYPE} INSTALL_WITH_TOOLCHAIN
|
||||
${SOURCES} DEPENDS intrinsics_gen)
|
||||
|
||||
install(FILES ${LLVM_MAIN_INCLUDE_DIR}/llvm-c/lto.h
|
||||
- DESTINATION include/llvm-c
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm-c
|
||||
COMPONENT LTO)
|
||||
|
||||
if (APPLE)
|
||||
diff --git a/tools/opt-viewer/CMakeLists.txt b/tools/opt-viewer/CMakeLists.txt
|
||||
index ead73ec13a8f..250362021f17 100644
|
||||
--- a/tools/opt-viewer/CMakeLists.txt
|
||||
+++ b/tools/opt-viewer/CMakeLists.txt
|
||||
@@ -8,7 +8,7 @@ set (files
|
||||
|
||||
foreach (file ${files})
|
||||
install(PROGRAMS ${file}
|
||||
- DESTINATION share/opt-viewer
|
||||
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/opt-viewer
|
||||
COMPONENT opt-viewer)
|
||||
endforeach (file)
|
||||
|
||||
diff --git a/tools/remarks-shlib/CMakeLists.txt b/tools/remarks-shlib/CMakeLists.txt
|
||||
index 865436247270..ce1daa62f6ab 100644
|
||||
--- a/tools/remarks-shlib/CMakeLists.txt
|
||||
+++ b/tools/remarks-shlib/CMakeLists.txt
|
||||
@@ -19,7 +19,7 @@ if(LLVM_ENABLE_PIC)
|
||||
endif()
|
||||
|
||||
install(FILES ${LLVM_MAIN_INCLUDE_DIR}/llvm-c/Remarks.h
|
||||
- DESTINATION include/llvm-c
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm-c
|
||||
COMPONENT Remarks)
|
||||
|
||||
if (APPLE)
|
||||
|
|
|
@ -1,26 +0,0 @@
pkgbase="mingw-w64-nac3artiq"
pkgname="mingw-w64-x86_64-nac3artiq"
pkgver=1.0
pkgrel=1
pkgdesc="New ARTIQ compiler 3"
arch=("any")
mingw_arch=("mingw64")
url="https://m-labs.hk"
license=("LGPL")
source=("nac3artiq.pyd")
noextract=("nac3artiq.pyd")
sha256sums=("SKIP")
depends=("mingw-w64-x86_64-python")

prepare() {
true
}

build() {
true
}

package() {
mkdir -p $pkgdir/mingw64/lib/python3.10/site-packages
cp ${srcdir}/nac3artiq.pyd $pkgdir/mingw64/lib/python3.10/site-packages
}
|
|
@ -21,10 +21,10 @@ let
|
|||
text =
|
||||
''
|
||||
implementation=CPython
|
||||
version=3.10
|
||||
version=3.9
|
||||
shared=true
|
||||
abi3=false
|
||||
lib_name=python3.10
|
||||
lib_name=python3.9
|
||||
lib_dir=${msys2-env}/mingw64/lib
|
||||
pointer_width=64
|
||||
build_flags=WITH_THREAD
|
||||
|
@ -34,14 +34,14 @@ let
|
|||
in rec {
|
||||
llvm-nac3 = pkgs.stdenvNoCC.mkDerivation rec {
|
||||
pname = "llvm-nac3-msys2";
|
||||
version = "14.0.6";
|
||||
version = "13.0.1";
|
||||
src-llvm = pkgs.fetchurl {
|
||||
url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/llvm-${version}.src.tar.xz";
|
||||
sha256 = "sha256-BQki7KrKV4H99mMeqSvHFRg/IC+dLxUUcibwI0FPYZo=";
|
||||
sha256 = "sha256-7GuA2Cw4SsrS3BkpA6bPLNuv+4ibhL+5janXHmMPyDQ=";
|
||||
};
|
||||
src-clang = pkgs.fetchurl {
|
||||
url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/clang-${version}.src.tar.xz";
|
||||
sha256 = "sha256-K1hHtqYxGLnv5chVSDY8gf/glrZsOzZ16VPiY0KuQDE=";
|
||||
sha256 = "sha256-eHqeLZn1yHIKoXc+S+AJRhzTDTvUD90kWR5HNGfJF8k=";
|
||||
};
|
||||
buildInputs = [ pkgs.wineWowPackages.stable ];
|
||||
phases = [ "unpackPhase" "patchPhase" "configurePhase" "buildPhase" "installPhase" ];
|
||||
|
@ -49,7 +49,6 @@ in rec {
|
|||
''
|
||||
mkdir llvm
|
||||
tar xf ${src-llvm} -C llvm --strip-components=1
|
||||
mv llvm/Modules/* llvm/cmake/modules # work around https://github.com/llvm/llvm-project/issues/53281
|
||||
mkdir clang
|
||||
tar xf ${src-clang} -C clang --strip-components=1
|
||||
cd llvm
|
||||
|
@ -65,11 +64,11 @@ in rec {
|
|||
${silenceFontconfig}
|
||||
mkdir build
|
||||
cd build
|
||||
wine64 cmake .. -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_UNWIND_TABLES=OFF -DLLVM_ENABLE_THREADS=OFF -DLLVM_TARGETS_TO_BUILD=X86\;ARM\;RISCV -DLLVM_LINK_LLVM_DYLIB=OFF -DLLVM_ENABLE_FFI=OFF -DFFI_INCLUDE_DIR=fck-cmake -DFFI_LIBRARY_DIR=fck-cmake -DLLVM_ENABLE_LIBXML2=OFF -DLLVM_INCLUDE_BENCHMARKS=OFF -DLLVM_ENABLE_PROJECTS=clang -DCMAKE_INSTALL_PREFIX=Z:$out
|
||||
wine64 cmake .. -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_UNWIND_TABLES=OFF -DLLVM_ENABLE_THREADS=OFF -DLLVM_TARGETS_TO_BUILD=X86\;ARM\;RISCV -DLLVM_LINK_LLVM_DYLIB=OFF -DLLVM_ENABLE_FFI=OFF -DFFI_INCLUDE_DIR=fck-cmake -DFFI_LIBRARY_DIR=fck-cmake -DLLVM_ENABLE_LIBXML2=OFF -DLLVM_ENABLE_PROJECTS=clang -DCMAKE_INSTALL_PREFIX=Z:$out
|
||||
'';
|
||||
buildPhase =
|
||||
''
|
||||
wine64 ninja -j $NIX_BUILD_CORES
|
||||
wine64 ninja
|
||||
'';
|
||||
installPhase =
|
||||
''
|
||||
|
@ -80,13 +79,8 @@ in rec {
|
|||
nac3artiq = pkgs.rustPlatform.buildRustPackage {
|
||||
name = "nac3artiq-msys2";
|
||||
src = ../../.;
|
||||
cargoLock = {
|
||||
lockFile = ../../Cargo.lock;
|
||||
outputHashes = {
|
||||
"inkwell-0.1.0" = "sha256-+ih3SO0n6YmZ/mcf+rLDwPAy/1MEZ/A+tI4pM1pUhvU=";
|
||||
};
|
||||
};
|
||||
nativeBuildInputs = [ pkgs.wineWowPackages.stable ];
|
||||
cargoLock = { lockFile = ../../Cargo.lock; };
|
||||
nativeBuildInputs = [ pkgs.wineWowPackages.stable pkgs.zip ];
|
||||
buildPhase =
|
||||
''
|
||||
export HOME=`mktemp -d`
|
||||
|
@ -98,9 +92,10 @@ in rec {
|
|||
'';
|
||||
installPhase =
|
||||
''
|
||||
mkdir $out $out/nix-support
|
||||
cp target/release/nac3artiq.dll $out/nac3artiq.pyd
|
||||
echo file binary-dist $out/nac3artiq.pyd >> $out/nix-support/hydra-build-products
|
||||
mkdir -p $out $out/nix-support
|
||||
ln -s target/release/nac3artiq.dll nac3artiq.pyd
|
||||
zip $out/nac3artiq.zip nac3artiq.pyd
|
||||
echo file binary-dist $out/nac3artiq.zip >> $out/nix-support/hydra-build-products
|
||||
'';
|
||||
checkPhase =
|
||||
''
|
||||
|
@ -108,23 +103,37 @@ in rec {
|
|||
'';
|
||||
dontFixup = true;
|
||||
};
|
||||
nac3artiq-pkg = pkgs.stdenvNoCC.mkDerivation {
|
||||
name = "nac3artiq-msys2-pkg";
|
||||
nativeBuildInputs = [ pkgs.pacman pkgs.fakeroot pkgs.libarchive pkgs.zstd ];
|
||||
src = nac3artiq;
|
||||
phases = [ "buildPhase" "installPhase" ];
|
||||
lld = pkgs.stdenvNoCC.mkDerivation rec {
|
||||
pname = "lld-msys2";
|
||||
version = "13.0.1";
|
||||
src = pkgs.fetchurl {
|
||||
url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/lld-${version}.src.tar.xz";
|
||||
sha256 = "sha256-Zmr3Rei/e2gFM7TRi3ox3HyrV1sebk0mGSK7r9lkTPs=";
|
||||
};
|
||||
buildInputs = [ pkgs.wineWowPackages.stable ];
|
||||
phases = [ "unpackPhase" "patchPhase" "configurePhase" "buildPhase" "installPhase" ];
|
||||
patches = [ ./lld-disable-macho.diff ];
|
||||
configurePhase =
|
||||
''
|
||||
export HOME=`mktemp -d`
|
||||
export WINEDEBUG=-all
|
||||
export WINEPATH=Z:${msys2-env}/mingw64/bin\;Z:${llvm-nac3}/bin
|
||||
${silenceFontconfig}
|
||||
mkdir build
|
||||
cd build
|
||||
wine64 cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=Z:$out
|
||||
'';
|
||||
buildPhase =
|
||||
''
|
||||
ln -s ${./PKGBUILD} PKGBUILD
|
||||
ln -s $src/nac3artiq.pyd nac3artiq.pyd
|
||||
makepkg --config ${./makepkg.conf} --nodeps
|
||||
wine64 ninja
|
||||
'';
|
||||
installPhase =
|
||||
''
|
||||
mkdir $out $out/nix-support
|
||||
cp *.pkg.tar.zst $out
|
||||
echo file msys2 $out/*.pkg.tar.zst >> $out/nix-support/hydra-build-products
|
||||
mkdir -p $out $out/nix-support
|
||||
cp bin/ld.lld.exe $out
|
||||
echo file binary-dist $out/ld.lld.exe >> $out/nix-support/hydra-build-products
|
||||
'';
|
||||
dontFixup = true;
|
||||
};
|
||||
wine-msys2 = pkgs.writeShellScriptBin "wine-msys2"
|
||||
''
|
||||
|
@ -133,12 +142,4 @@ in rec {
|
|||
export PYO3_CONFIG_FILE=Z:${pyo3-mingw-config}
|
||||
exec ${pkgs.wineWowPackages.stable}/bin/wine64 cmd
|
||||
'';
|
||||
wine-msys2-build = pkgs.writeShellScriptBin "wine-msys2-build"
|
||||
''
|
||||
export HOME=`mktemp -d`
|
||||
export WINEDEBUG=-all
|
||||
export WINEPATH=Z:${msys2-env}/mingw64/bin
|
||||
${silenceFontconfig}
|
||||
exec ${pkgs.wineWowPackages.stable}/bin/wine64 $@
|
||||
'';
|
||||
}
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
diff '--color=auto' -Naur lld-13.0.1.src/CMakeLists.txt lld-13.0.1.src-new/CMakeLists.txt
|
||||
--- lld-13.0.1.src/CMakeLists.txt 2022-01-21 05:31:59.000000000 +0800
|
||||
+++ lld-13.0.1.src-new/CMakeLists.txt 2022-03-27 18:26:30.284921982 +0800
|
||||
@@ -206,7 +206,6 @@
|
||||
add_subdirectory(docs)
|
||||
add_subdirectory(COFF)
|
||||
add_subdirectory(ELF)
|
||||
-add_subdirectory(MachO)
|
||||
add_subdirectory(MinGW)
|
||||
add_subdirectory(wasm)
|
||||
|
||||
diff '--color=auto' -Naur lld-13.0.1.src/tools/lld/CMakeLists.txt lld-13.0.1.src-new/tools/lld/CMakeLists.txt
|
||||
--- lld-13.0.1.src/tools/lld/CMakeLists.txt 2022-01-21 05:31:59.000000000 +0800
|
||||
+++ lld-13.0.1.src-new/tools/lld/CMakeLists.txt 2022-03-27 18:26:40.805046295 +0800
|
||||
@@ -15,7 +15,6 @@
|
||||
lldCOFF
|
||||
lldDriver
|
||||
lldELF
|
||||
- lldMachO2
|
||||
lldMinGW
|
||||
lldWasm
|
||||
)
|
||||
diff '--color=auto' -Naur lld-13.0.1.src/tools/lld/lld.cpp lld-13.0.1.src-new/tools/lld/lld.cpp
|
||||
--- lld-13.0.1.src/tools/lld/lld.cpp 2022-01-21 05:31:59.000000000 +0800
|
||||
+++ lld-13.0.1.src-new/tools/lld/lld.cpp 2022-03-27 08:43:54.205524156 +0800
|
||||
@@ -148,10 +148,6 @@
|
||||
return !elf::link(args, exitEarly, stdoutOS, stderrOS);
|
||||
case WinLink:
|
||||
return !coff::link(args, exitEarly, stdoutOS, stderrOS);
|
||||
- case Darwin:
|
||||
- return !macho::link(args, exitEarly, stdoutOS, stderrOS);
|
||||
- case DarwinOld:
|
||||
- return !mach_o::link(args, exitEarly, stdoutOS, stderrOS);
|
||||
case Wasm:
|
||||
return !lld::wasm::link(args, exitEarly, stdoutOS, stderrOS);
|
||||
default:
|
|
@ -10,7 +10,7 @@ curl -L https://mirror.msys2.org/msys/x86_64/pacman-mirrors-20220205-1-any.pkg.t
|
|||
curl -L https://raw.githubusercontent.com/msys2/MSYS2-packages/master/pacman/pacman.conf | grep -v SigLevel | sed s\|/etc/pacman.d\|$MSYS2DIR/etc/pacman.d\|g > $MSYS2DIR/etc/pacman.conf
|
||||
|
||||
fakeroot pacman --root $MSYS2DIR --config $MSYS2DIR/etc/pacman.conf -Syy
|
||||
pacman --root $MSYS2DIR --config $MSYS2DIR/etc/pacman.conf --cachedir $MSYS2DIR/msys/cache -Sp mingw-w64-x86_64-rust mingw-w64-x86_64-cmake mingw-w64-x86_64-ninja mingw-w64-x86_64-python3.10 mingw-w64-x86_64-python-numpy mingw-w64-x86_64-python-setuptools > $MSYS2DIR/packages.txt
|
||||
pacman --root $MSYS2DIR --config $MSYS2DIR/etc/pacman.conf --cachedir $MSYS2DIR/msys/cache -Sp mingw-w64-x86_64-rust mingw-w64-x86_64-cmake mingw-w64-x86_64-ninja mingw-w64-x86_64-python3.9 mingw-w64-x86_64-python-numpy > $MSYS2DIR/packages.txt
|
||||
|
||||
echo "{ pkgs } : [" > msys2_packages.nix
|
||||
while read package; do
|
||||
|
|
|
@ -1,2 +0,0 @@
|
|||
PKGEXT='.pkg.tar.zst'
|
||||
SRCEXT='.src.tar.gz'
|
|
@ -1,28 +1,33 @@
|
|||
{ pkgs } : [
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libiconv-1.17-1-any.pkg.tar.zst";
|
||||
sha256 = "1pb1x5wrlmmpjdpzsc7rs5xk6ydlsd5mval0fwrqq54jf6dxdzpz";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libiconv-1.16-2-any.pkg.tar.zst";
|
||||
sha256 = "0nr8gaqz7vhjsqq8ys3z63bd62fz548r9n0sncz513ra04wg7la4";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-zlib-1.2.12-1-any.pkg.tar.zst";
|
||||
sha256 = "1b461ic5s3hjk3y70ldik82ny08rdywn1zfqa8d2jyyvnh4dya77";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-zlib-1.2.11-9-any.pkg.tar.zst";
|
||||
sha256 = "0fb3xbw9a0ah4viwp3a7hr5phnc7mvcl9ba2yjidncpqmspypacx";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-binutils-2.38-4-any.pkg.tar.zst";
|
||||
sha256 = "18cgs1cvhr8hrq46g2av9as589wxn76rrshhzvx8max8iqzwprm3";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-binutils-2.38-2-any.pkg.tar.zst";
|
||||
sha256 = "121jz2nmfk0qgkwjll8bg3kavmzpp14raid4az44p10vfdlla7f6";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-headers-git-10.0.0.r59.gaacb650be-1-any.pkg.tar.zst";
|
||||
sha256 = "0gq38zb880ar0xj62ddcggw8cqg7h6g1yw0x422i8cgak6x8qasp";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-headers-git-9.0.0.6451.a3f6d363d-1-any.pkg.tar.zst";
|
||||
sha256 = "1ngnjb9vgk295wlwqandm0nhqqdfrp584kx3nfshxkhwmj5gpzxk";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-crt-git-10.0.0.r59.gaacb650be-1-any.pkg.tar.zst";
|
||||
sha256 = "1safighnniwmjrklrig41m1kj1b40lrzaiv48xzf26ljb45fy6lq";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-crt-git-9.0.0.6451.a3f6d363d-1-any.pkg.tar.zst";
|
||||
sha256 = "1ccipidbsjncdhr48k50ia53dwn7v3ghdl8f1svgwvnh3mrx0bww";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-isl-0.24-1-any.pkg.tar.zst";
|
||||
sha256 = "0dngp6p1yw3i9mvwg9rl888dqa7fjs8xczx1lqacw7lj98q1396d";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -30,11 +35,6 @@
|
|||
sha256 = "170640c8j81gl67kp85kr8kmg5axsl1vqwn9g7cx6vcr638qax9c";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-isl-0.25-1-any.pkg.tar.zst";
|
||||
sha256 = "0hky9gmd6iz1s3irmp9fk2j10cpqrrw8l810riwr58ynj3i10j2k";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-mpfr-4.1.0.p13-1-any.pkg.tar.zst";
|
||||
sha256 = "17klcf17mddd7hsrak920zglqh00drqjdh6dxh3v3c4y62xj1qr6";
|
||||
|
@ -46,13 +46,13 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libwinpthread-git-10.0.0.r59.gaacb650be-1-any.pkg.tar.zst";
|
||||
sha256 = "0a9niq05s7ny0y1x625xy9p3dzakw5l4w8djajv09lkxqx36yp40";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libwinpthread-git-9.0.0.6451.a3f6d363d-1-any.pkg.tar.zst";
|
||||
sha256 = "0qdy79l5y02lw2xa8i3j6yayhz8a7awfgyyd82pcmbzwx57q2xqb";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-gcc-libs-12.1.0-3-any.pkg.tar.zst";
|
||||
sha256 = "0gxifzjl9v72z5fbr89j47j2b7l7ba9cf4xf49wb3khppqb2q9by";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-gcc-libs-11.2.0-10-any.pkg.tar.zst";
|
||||
sha256 = "1n8q09dwh0ghaw3p3bgxi3q0848gsjzd210bgp7qy05hv73b8kc1";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -61,8 +61,8 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-winpthreads-git-10.0.0.r59.gaacb650be-1-any.pkg.tar.zst";
|
||||
sha256 = "1mhy806hdx27w3fzpb4zv9ia0c2r6n53ljcpkpcanwbqc3hhmj9f";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-winpthreads-git-9.0.0.6451.a3f6d363d-1-any.pkg.tar.zst";
|
||||
sha256 = "08zwgkrp45y5ry8avz61krasjkk4k4a5rrdz4nd78bbbah84mpgz";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -71,8 +71,8 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-gcc-12.1.0-3-any.pkg.tar.zst";
|
||||
sha256 = "0j2p4516r7r9igcnfjcxyzzgppy60hx76gp78lqk0331aj1c5d1d";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-gcc-11.2.0-10-any.pkg.tar.zst";
|
||||
sha256 = "0h6pi1xrxbrg27klbj5i5rjl8ydz78lpjfhb9pdayxjr8rs5nblq";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -81,13 +81,13 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-brotli-1.0.9-5-any.pkg.tar.zst";
|
||||
sha256 = "044n36p4s2n73fxvac55cqqw6di19v4m92v2h0qnphazj6wcg1d0";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-brotli-1.0.9-4-any.pkg.tar.zst";
|
||||
sha256 = "0vn42aqam6m9755vy32qr626xqglb6vsbmywdyvvagzmm8s5fxrg";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-expat-2.4.8-1-any.pkg.tar.zst";
|
||||
sha256 = "1qkw4k61ddaflns5ms0xh0czbx99wxhs0dfbk8sv8by2rkshl51k";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-expat-2.4.7-1-any.pkg.tar.zst";
|
||||
sha256 = "19qh3kk2kmkkzxirpx1swgfsfb29gy9q8qgdmrzzbwrqrn8vs77j";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -96,13 +96,13 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libunistring-1.0-1-any.pkg.tar.zst";
|
||||
sha256 = "1qks1gm8jscnn93sr7n1azkzcq4a8fybsikpqcf920m9b66cym4k";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libunistring-0.9.10-4-any.pkg.tar.zst";
|
||||
sha256 = "0fimgakzgxrnnqk5rkvnbwl6lyqdrpgl2jcbcagqjv1swdmav97m";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libidn2-2.3.3-1-any.pkg.tar.zst";
|
||||
sha256 = "1m3qgnhgf0g389kglrai26x4k64gs2cy9b3mjwlkw5xcs2r3smww";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libidn2-2.3.1-1-any.pkg.tar.zst";
|
||||
sha256 = "00vm6d56ldr1f4h0dn15j0ja17dif45qxlxaqv4x5nw555frklf5";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -126,13 +126,13 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-ca-certificates-20211016-3-any.pkg.tar.zst";
|
||||
sha256 = "02x6dnbbyjm6mcl6ii61bc5rkwg3qsbaqd2lyzsp5732hxjcmmq4";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-ca-certificates-20210119-1-any.pkg.tar.zst";
|
||||
sha256 = "0ia48njn92shddlq3f707wz6dvj0j1k60iscs6vybybw33ijhgsq";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-openssl-1.1.1.q-1-any.pkg.tar.zst";
|
||||
sha256 = "0rfb3z9jd0y6xjhv4qx1qqyyqgnzzlchbm07icpb4slgwjbm7cjg";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-openssl-1.1.1.n-1-any.pkg.tar.zst";
|
||||
sha256 = "0gb6nswwm4b66w4x2zydha23fck967hzb5gckwlv05dws13hbh22";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -151,28 +151,28 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-xz-5.2.5-3-any.pkg.tar.zst";
|
||||
sha256 = "099j96iv49b2xddfaq7a69l0j818hw7cxyas6g7cm7iw3crsykfr";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-xz-5.2.5-2-any.pkg.tar.zst";
|
||||
sha256 = "1w12cbn6zq2szfa1wgr019i4ayv9x29d1nnh5vlq26l6fmzd3j1g";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libxml2-2.9.14-4-any.pkg.tar.zst";
|
||||
sha256 = "1d6v37k0hiznlv0qnr25cpjgwa7rphahiwcrc7jf44qwdmbdasrv";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libxml2-2.9.13-1-any.pkg.tar.zst";
|
||||
sha256 = "093fm3i8018mig0wy2p53z8izm0bfqlviacray79j2kv1bq8xyn6";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-nghttp2-1.48.0-1-any.pkg.tar.zst";
|
||||
sha256 = "023lnhncm697sdbgnrzvc56c9lzcn29dsrl1m58hsxxjb7rrcrlf";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-nghttp2-1.47.0-1-any.pkg.tar.zst";
|
||||
sha256 = "0lwylch8s7blr2hdngs2v0syh1gnicm0z4wpi28ifwdp4kzrh7mh";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-curl-7.84.0-2-any.pkg.tar.zst";
|
||||
sha256 = "0j6b3arlcsyk5fn2nr7x92j2pqkn26zyrg1zy3pc0qcd3q8hlbr0";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-curl-7.82.0-1-any.pkg.tar.zst";
|
||||
sha256 = "0czysrplb3lgd59v4c4v8sihbcs3hdih9d8iqrhkf28n7vds859i";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-rust-1.62.1-1-any.pkg.tar.zst";
|
||||
sha256 = "1bxnjgf1vx1qyf0nzmmc6s096jbw7354fkb1khhmldi15yb2f8h8";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-rust-1.59.0-1-any.pkg.tar.zst";
|
||||
sha256 = "1c1yr7w3h2ybbx6cywqgpns4ii0dws7jqcix8fiv0rbkjq5hxlsv";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -211,8 +211,13 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libarchive-3.6.1-2-any.pkg.tar.zst";
|
||||
sha256 = "1wgv99pxk2pv4kr5cs111k7813bvlykphirksz8pr62kv8a1n47s";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-nettle-3.7.3-3-any.pkg.tar.zst";
|
||||
sha256 = "10p1jlik2zhqnpphk3k1q6k2my6j0zig49r5fs21a8f6l0gakj1x";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-libarchive-3.6.0-1-any.pkg.tar.zst";
|
||||
sha256 = "1jd0rj49il09a56dnvgyjzmjj37pdspqhkfm8smgakgsgn9wkm46";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -226,13 +231,13 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-ninja-1.11.0-1-any.pkg.tar.zst";
|
||||
sha256 = "0s4zwj4cwzql5l7yx3rj6c8s9jkhjvqqfv5rg0a2grp4abcmv51m";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-cmake-3.22.3-2-any.pkg.tar.zst";
|
||||
sha256 = "1xp8n5s98va7a9cq36d9p49lk32yv9vkvlayc4d4j465xzm21hgp";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-cmake-3.24.0-1-any.pkg.tar.zst";
|
||||
sha256 = "0aykg8g07jnsf549ws293ykgsxy2czbnv2yjix1dwilwc9a11w86";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-ninja-1.10.2-3-any.pkg.tar.zst";
|
||||
sha256 = "1wsylz9m3hm6lq71qfyc3hc8rmxnv3kp602d1yvvh8nvb8pgza1y";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -241,8 +246,8 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-ncurses-6.3-5-any.pkg.tar.zst";
|
||||
sha256 = "029z63bw9pwhamw1zi75fr112pxk934nh08by2l54lwdais0vjq8";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-ncurses-6.3-3-any.pkg.tar.zst";
|
||||
sha256 = "1kicmq9xy4mh5rzzf107jikpmql0h78618b3xpks5l59c1hm8ril";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -251,8 +256,8 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-readline-8.1.002-2-any.pkg.tar.zst";
|
||||
sha256 = "136fp0cymxqzgs4s8dmal1f4v6ns2mw8jn4cbfihxqb2cmf9yil8";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-readline-8.1.001-1-any.pkg.tar.zst";
|
||||
sha256 = "0q04sz2ibvcd69hb681j4s6zyakm4i7zpk12qajj38l17v9qmlac";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -261,8 +266,8 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-sqlite3-3.39.1-1-any.pkg.tar.zst";
|
||||
sha256 = "0nabw7iy5za5hdpflkgn1s1v93786h9zz6sxzjxm23wymfk1yxlg";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-sqlite3-3.38.1-1-any.pkg.tar.zst";
|
||||
sha256 = "04h4m72mwmad82nzrl5qj9wlsinjs7z7bsbbq17dxkx92aj75p26";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
|
@ -276,27 +281,22 @@
|
|||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-python-3.10.5-3-any.pkg.tar.zst";
|
||||
sha256 = "1198p71k30c6kspi8mx6kmsk48fdblfr75291s0gmbmdgba7gfw4";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-python-3.9.11-2-any.pkg.tar.zst";
|
||||
sha256 = "05znxaybrm8affs83a51872iksa9yd4qk692lr3rjdjy3cbxkhca";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-gcc-libgfortran-12.1.0-3-any.pkg.tar.zst";
|
||||
sha256 = "11mawrmxp4habwsvbmfsalb136m4dmzlrjy3pcwp7rq8wxx2vnah";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-gcc-libgfortran-11.2.0-10-any.pkg.tar.zst";
|
||||
sha256 = "13qdcb614sz7w10b2snmp05qh4c7wf24qmfmzssxjjz8ld6p8b90";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-openblas-0.3.20-3-any.pkg.tar.zst";
|
||||
sha256 = "07d8cp8in2nbh6dsyis9cvy83y16gz5wfq5fp0fddgh1ak8ihyn2";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-openblas-0.3.20-1-any.pkg.tar.zst";
|
||||
sha256 = "1adbbycbvs2nkjhgap92fk3x0vqfjb3ghhvyd1xlnn56c5n0iphf";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-python-numpy-1.23.1-1-any.pkg.tar.zst";
|
||||
sha256 = "05by2nm402jkvzaxcz7g3vmh93qmh6f2ddhambmpn4778np6n9bz";
|
||||
})
|
||||
|
||||
(pkgs.fetchurl {
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-python-setuptools-63.2.0-2-any.pkg.tar.zst";
|
||||
sha256 = "0280dajh9rvvg3zl4qrgbap6i6n3lxn172kscn6728ifhn3ap3bh";
|
||||
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-python-numpy-1.21.5-1-any.pkg.tar.zst";
|
||||
sha256 = "10bhfq65nrzxipgy75bqaad74daif4ay06phwvbx70b9j0wm33c3";
|
||||
})
|
||||
]