Compare commits

...

24 Commits

Author SHA1 Message Date
David Nadlinger b7ee07d7f1 core/builtins: Simplify vector of None creation [nfc] 2022-04-22 23:50:00 +01:00
David Nadlinger 188208b959 core/typecheck: Implement unification for scalar indexing of ndarrays 2022-04-22 22:46:24 +01:00
David Nadlinger 164edd266e core/typecheck: Explicitly give errors on "advanced" (subset) indexing 2022-04-22 22:28:59 +01:00
David Nadlinger c74b7992f6 core/typecheck: Basic ndarray indexing support 2022-04-22 21:56:35 +01:00
David Nadlinger 72cb693e2e core/typecheck: First bits of NumPy-like array type inference
For readability of the codebase, I chose ndarray for the name of the
type, while [numpy.]array() is the name of the most commonly used
constructor.
2022-04-22 21:56:35 +01:00
David Nadlinger 8454741f9e cargo fmt the world 2022-04-22 21:56:35 +01:00
David Nadlinger 2e6fc4cfe5 [tmp] nix: Switch to aarch64-darwin 2022-04-22 21:56:21 +01:00
ychenfo 48cb485b89 nac3core: show outer type info in type error messages
Reviewed-on: M-Labs/nac3#274
Co-authored-by: ychenfo <yc@m-labs.hk>
Co-committed-by: ychenfo <yc@m-labs.hk>
2022-04-22 15:31:55 +08:00
Sebastien Bourdeauducq 837aaa95f1 flake: contain sipyco to nac3artiq-profile 2022-04-19 10:34:55 +08:00
Sebastien Bourdeauducq a19e9c0bec flake: provide llvm-as for IRRT
clang already depends on llvmPackages_13.llvm, so, unlike the statically-linked tools
from llvm-nac3, this does not make the bloat even worse.
2022-04-19 10:23:41 +08:00
Sebastien Bourdeauducq 5dbe1d3d7d llvm: restore llvm-config 2022-04-19 10:23:12 +08:00
Sebastien Bourdeauducq e9bca3c822 llvm: set LLVM_BUILD_TOOLS=OFF 2022-04-19 00:30:11 +08:00
Sebastien Bourdeauducq 42d1aad507 flake: add PGO build to Hydra 2022-04-18 23:58:43 +08:00
Sebastien Bourdeauducq 2777a6e05f flake: use nac3devices example for PGO 2022-04-18 23:57:57 +08:00
Sebastien Bourdeauducq 05be5e93c4 flake: update nixpkgs 2022-04-18 18:48:05 +08:00
Sebastien Bourdeauducq 85f21060e4 update to LLVM 14 2022-04-18 18:47:20 +08:00
Sebastien Bourdeauducq a308d24caa nac3standalone: cleanup 2022-04-18 16:02:48 +08:00
Sebastien Bourdeauducq 1eac111d4c cleanup 2022-04-18 15:55:37 +08:00
ychenfo 44199781dc nac3standalone: add tests for operators 2022-04-18 15:31:56 +08:00
ychenfo 711c3d3303 nac3core: support custom operators 2022-04-18 15:31:56 +08:00
sb10q 0975264482 README: center icon 2022-04-18 15:11:32 +08:00
Sebastien Bourdeauducq 087aded3a3 add icon
Icon is copyright Evgeny Filatov and not covered by any free software license.
2022-04-18 15:07:53 +08:00
ychenfo f14b32be67 nac3artiq: type check host int bound instead of panic when codegen 2022-04-16 03:01:37 +08:00
David Nadlinger 879c66cccf flake.nix: Fix outdated nixConfig keys
The old syntax seems to be silently ignored on (at least)
Nix 2.7.0.
2022-04-13 21:21:18 +01:00
50 changed files with 2276 additions and 1917 deletions
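
The ndarray commits above (72cb693e2e, c74b7992f6, 164edd266e and 188208b959) describe the user-facing surface the new type inference targets: ndarray is the type name, [numpy.]array() is the most common constructor, scalar indexing unifies with the element type, and "advanced" (subset) indexing is rejected with an explicit error. A minimal sketch of that surface in kernel-style Python, assuming NumPy-compatible names (illustrative only, not taken from the NAC3 test suite):

    from numpy import array  # in NAC3, array() is the constructor; ndarray is the inferred type

    a = array([1.0, 2.0, 3.0])  # inferred as an ndarray of floats
    x = a[0]                    # scalar indexing: the result unifies with the element type (float)

    # "Advanced" (subset) indexing, e.g. a[[0, 2]] or boolean masks, is explicitly
    # rejected by the type checker at this stage rather than silently mis-typed.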

Cargo.lock (generated)

@ -320,24 +320,21 @@ dependencies = [
[[package]] [[package]]
name = "inkwell" name = "inkwell"
version = "0.1.0-beta.4" version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/nbaksalyar/inkwell.git?branch=llvm14#54f9e286279666d40d5f6e2c3fde8a91334a7b63"
checksum = "2223d0eba0ae6d40a3e4680c6a3209143471e1f38b41746ea309aa36dde9f90b"
dependencies = [ dependencies = [
"either", "either",
"inkwell_internals", "inkwell_internals",
"libc", "libc",
"llvm-sys", "llvm-sys",
"once_cell", "once_cell",
"parking_lot 0.11.2", "parking_lot 0.12.0",
"regex",
] ]
[[package]] [[package]]
name = "inkwell_internals" name = "inkwell_internals"
version = "0.5.0" version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/nbaksalyar/inkwell.git?branch=llvm14#54f9e286279666d40d5f6e2c3fde8a91334a7b63"
checksum = "3c7090af3d300424caa81976b8c97bca41cd70e861272c072e188ae082fb49f9"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -422,9 +419,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.122" version = "0.2.123"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec647867e2bf0772e28c8bcde4f0d19a9216916e890543b5a03ed8ef27b8f259" checksum = "cb691a747a7ab48abc15c5b42066eaafde10dc427e3b6ee2a1cf43db04c763bd"
[[package]] [[package]]
name = "libloading" name = "libloading"
@ -444,9 +441,9 @@ checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
[[package]] [[package]]
name = "llvm-sys" name = "llvm-sys"
version = "130.0.3" version = "140.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95eb03b4f7ae21f48ef7c565a3e3aa22c50616aea64645fb1fd7f6f56b51c274" checksum = "7edbec78fa56ea7a1ff451683a51b8ecf79a65ca9e88a4be6c4b0a6fc300d2a6"
dependencies = [ dependencies = [
"cc", "cc",
"lazy_static", "lazy_static",
@ -642,15 +639,6 @@ dependencies = [
"proc-macro-hack", "proc-macro-hack",
] ]
[[package]]
name = "pest"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
dependencies = [
"ucd-trie",
]
[[package]] [[package]]
name = "petgraph" name = "petgraph"
version = "0.5.1" version = "0.5.1"
@ -836,9 +824,9 @@ dependencies = [
[[package]] [[package]]
name = "rayon" name = "rayon"
version = "1.5.1" version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" checksum = "fd249e82c21598a9a426a4e00dd7adc1d640b22445ec8545feef801d1a74c221"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"crossbeam-deque", "crossbeam-deque",
@ -848,14 +836,13 @@ dependencies = [
[[package]] [[package]]
name = "rayon-core" name = "rayon-core"
version = "1.9.1" version = "1.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" checksum = "9f51245e1e62e1f1629cbfec37b5793bbabcaeb90f30e94d2ba03564687353e4"
dependencies = [ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"crossbeam-deque", "crossbeam-deque",
"crossbeam-utils", "crossbeam-utils",
"lazy_static",
"num_cpus", "num_cpus",
] ]
@ -932,21 +919,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]] [[package]]
name = "semver" name = "semver"
version = "0.11.0" version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" checksum = "d65bd28f48be7196d222d95b9243287f48d27aca604e08497513019ff0502cc4"
dependencies = [
"semver-parser",
]
[[package]]
name = "semver-parser"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7"
dependencies = [
"pest",
]
[[package]] [[package]]
name = "serde" name = "serde"
@ -1121,12 +1096,6 @@ dependencies = [
"crunchy", "crunchy",
] ]
[[package]]
name = "ucd-trie"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
[[package]] [[package]]
name = "unic-char-property" name = "unic-char-property"
version = "0.9.0" version = "0.9.0"

README.md

@ -1,5 +1,10 @@
# NAC3 <div align="center">
![icon](https://git.m-labs.hk/M-Labs/nac3/raw/branch/master/nac3.svg)
</div>
# NAC3
NAC3 is a major, backward-incompatible rewrite of the compiler for the [ARTIQ](https://m-labs.hk/artiq) physics experiment control and data acquisition system. It features greatly improved compilation speeds, a much better type system, and more predictable and transparent operation. NAC3 is a major, backward-incompatible rewrite of the compiler for the [ARTIQ](https://m-labs.hk/artiq) physics experiment control and data acquisition system. It features greatly improved compilation speeds, a much better type system, and more predictable and transparent operation.
NAC3 has a modular design and its applicability reaches beyond ARTIQ. The ``nac3core`` module does not contain anything specific to ARTIQ, and can be used in any project that requires compiling Python to machine code. NAC3 has a modular design and its applicability reaches beyond ARTIQ. The ``nac3core`` module does not contain anything specific to ARTIQ, and can be used in any project that requires compiling Python to machine code.

flake.lock

@ -2,11 +2,11 @@
"nodes": { "nodes": {
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1649619156, "lastModified": 1650244918,
"narHash": "sha256-p0q4zpuKMwrzGF+5ZU7Thnpac5TinhDI9jr2mBxhV4w=", "narHash": "sha256-DsS5nxjTpnoUC4pNXJI1rit7TnDTij8vQDa5PtcDCD0=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "e7d63bd0d50df412f5a1d8acfa3caae75522e347", "rev": "7b38b03d76ab71bdc8dc325e3f6338d984cc35ca",
"type": "github" "type": "github"
}, },
"original": { "original": {

flake.nix

@ -5,17 +5,23 @@
outputs = { self, nixpkgs }: outputs = { self, nixpkgs }:
let let
pkgs = import nixpkgs { system = "x86_64-linux"; }; pkgs = import nixpkgs { system = "aarch64-darwin"; };
in rec { in rec {
packages.x86_64-linux = rec { packages.aarch64-darwin = rec {
llvm-nac3 = pkgs.callPackage ./nix/llvm {}; llvm-nac3 = pkgs.callPackage ./nix/llvm {};
nac3artiq = pkgs.python3Packages.toPythonModule ( nac3artiq = pkgs.python3Packages.toPythonModule (
pkgs.rustPlatform.buildRustPackage { pkgs.rustPlatform.buildRustPackage rec {
name = "nac3artiq"; name = "nac3artiq";
outputs = [ "out" "runkernel" "standalone" ]; outputs = [ "out" "runkernel" "standalone" ];
src = self; src = self;
cargoLock = { lockFile = ./Cargo.lock; }; cargoLock = {
nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_13.clang-unwrapped llvm-nac3 ]; lockFile = ./Cargo.lock;
outputHashes = {
"inkwell-0.1.0" = "sha256-THGKoTqQCSusxMukOiksQ9pCnxdIBUO6MH3fiwQjYVA=";
};
};
passthru.cargoLock = cargoLock;
nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_13.clang-unwrapped pkgs.llvmPackages_13.llvm.out llvm-nac3 ];
buildInputs = [ pkgs.python3 llvm-nac3 ]; buildInputs = [ pkgs.python3 llvm-nac3 ];
checkInputs = [ (pkgs.python3.withPackages(ps: [ ps.numpy ])) ]; checkInputs = [ (pkgs.python3.withPackages(ps: [ ps.numpy ])) ];
checkPhase = checkPhase =
@ -56,8 +62,8 @@
pkgs.rustPlatform.buildRustPackage { pkgs.rustPlatform.buildRustPackage {
name = "nac3artiq-instrumented"; name = "nac3artiq-instrumented";
src = self; src = self;
cargoLock = { lockFile = ./Cargo.lock; }; inherit (nac3artiq) cargoLock;
nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_13.clang-unwrapped llvm-nac3-instrumented ]; nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_13.clang-unwrapped pkgs.llvmPackages_13.llvm.out llvm-nac3-instrumented ];
buildInputs = [ pkgs.python3 llvm-nac3-instrumented ]; buildInputs = [ pkgs.python3 llvm-nac3-instrumented ];
cargoBuildFlags = [ "--package" "nac3artiq" "--features" "init-llvm-profile" ]; cargoBuildFlags = [ "--package" "nac3artiq" "--features" "init-llvm-profile" ];
doCheck = false; doCheck = false;
@ -75,11 +81,36 @@
); );
nac3artiq-profile = pkgs.stdenvNoCC.mkDerivation { nac3artiq-profile = pkgs.stdenvNoCC.mkDerivation {
name = "nac3artiq-profile"; name = "nac3artiq-profile";
src = self; srcs = [
buildInputs = [ (python3-mimalloc.withPackages(ps: [ ps.numpy nac3artiq-instrumented ])) pkgs.lld_13 pkgs.llvmPackages_13.libllvm ]; (pkgs.fetchFromGitHub {
owner = "m-labs";
repo = "sipyco";
rev = "939f84f9b5eef7efbf7423c735d1834783b6140e";
sha256 = "sha256-15Nun4EY35j+6SPZkjzZtyH/ncxLS60KuGJjFh5kSTc=";
})
(pkgs.fetchFromGitHub {
owner = "m-labs";
repo = "artiq";
rev = "dd57fdc530baf926a5f354dc1c2bd90564affd96";
sha256 = "sha256-hcqVcToYWkc3oDFkKr9wZUF65ydiSYVHdmiGiu2Mc1c=";
})
];
buildInputs = [
(python3-mimalloc.withPackages(ps: [ ps.numpy ps.jsonschema nac3artiq-instrumented ]))
pkgs.lld_13
pkgs.llvmPackages_13.llvm.out
];
phases = [ "buildPhase" "installPhase" ]; phases = [ "buildPhase" "installPhase" ];
# TODO: get more representative code. buildPhase =
buildPhase = "python $src/nac3artiq/demo/demo.py"; ''
srcs=($srcs)
sipyco=''${srcs[0]}
artiq=''${srcs[1]}
export PYTHONPATH=$sipyco:$artiq
python -m artiq.frontend.artiq_ddb_template $artiq/artiq/examples/nac3devices/nac3devices.json > device_db.py
cp $artiq/artiq/examples/nac3devices/nac3devices.py .
python -m artiq.frontend.artiq_compile nac3devices.py
'';
installPhase = installPhase =
'' ''
mkdir $out mkdir $out
@ -94,8 +125,8 @@
pkgs.rustPlatform.buildRustPackage { pkgs.rustPlatform.buildRustPackage {
name = "nac3artiq-pgo"; name = "nac3artiq-pgo";
src = self; src = self;
cargoLock = { lockFile = ./Cargo.lock; }; inherit (nac3artiq) cargoLock;
nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_13.clang-unwrapped llvm-nac3-pgo ]; nativeBuildInputs = [ pkgs.python3 pkgs.llvmPackages_13.clang-unwrapped pkgs.llvmPackages_13.llvm.out llvm-nac3-pgo ];
buildInputs = [ pkgs.python3 llvm-nac3-pgo ]; buildInputs = [ pkgs.python3 llvm-nac3-pgo ];
cargoBuildFlags = [ "--package" "nac3artiq" ]; cargoBuildFlags = [ "--package" "nac3artiq" ];
cargoTestFlags = [ "--package" "nac3ast" "--package" "nac3parser" "--package" "nac3core" "--package" "nac3artiq" ]; cargoTestFlags = [ "--package" "nac3ast" "--package" "nac3parser" "--package" "nac3core" "--package" "nac3artiq" ];
@ -111,24 +142,26 @@
packages.x86_64-w64-mingw32 = import ./nix/windows { inherit pkgs; }; packages.x86_64-w64-mingw32 = import ./nix/windows { inherit pkgs; };
devShell.x86_64-linux = pkgs.mkShell { devShell.aarch64-darwin = pkgs.mkShell {
name = "nac3-dev-shell"; name = "nac3-dev-shell";
buildInputs = with pkgs; [ buildInputs = with pkgs; [
# build dependencies # build dependencies
packages.x86_64-linux.llvm-nac3 packages.aarch64-darwin.llvm-nac3
llvmPackages_13.clang-unwrapped # IRRT llvmPackages_13.clang-unwrapped # IRRT
pkgs.llvmPackages_13.llvm.out # IRRT
libiconv
cargo cargo
rustc rustc
# runtime dependencies # runtime dependencies
lld_13 lld_13
(packages.x86_64-linux.python3-mimalloc.withPackages(ps: [ ps.numpy ])) (packages.aarch64-darwin.python3-mimalloc.withPackages(ps: [ ps.numpy ]))
# development tools # development tools
cargo-insta cargo-insta
clippy clippy
rustfmt rustfmt
]; ];
}; };
devShells.x86_64-linux.msys2 = pkgs.mkShell { devShells.aarch64-darwin.msys2 = pkgs.mkShell {
name = "nac3-dev-shell-msys2"; name = "nac3-dev-shell-msys2";
buildInputs = with pkgs; [ buildInputs = with pkgs; [
curl curl
@ -139,7 +172,7 @@
}; };
hydraJobs = { hydraJobs = {
inherit (packages.x86_64-linux) llvm-nac3 nac3artiq; inherit (packages.aarch64-darwin) llvm-nac3 nac3artiq nac3artiq-pgo;
llvm-nac3-msys2 = packages.x86_64-w64-mingw32.llvm-nac3; llvm-nac3-msys2 = packages.x86_64-w64-mingw32.llvm-nac3;
nac3artiq-msys2 = packages.x86_64-w64-mingw32.nac3artiq; nac3artiq-msys2 = packages.x86_64-w64-mingw32.nac3artiq;
nac3artiq-msys2-pkg = packages.x86_64-w64-mingw32.nac3artiq-pkg; nac3artiq-msys2-pkg = packages.x86_64-w64-mingw32.nac3artiq-pkg;
@ -148,7 +181,7 @@
}; };
nixConfig = { nixConfig = {
binaryCachePublicKeys = ["nixbld.m-labs.hk-1:5aSRVA5b320xbNvu30tqxVPXpld73bhtOeH6uAjRyHc="]; extra-trusted-public-keys = "nixbld.m-labs.hk-1:5aSRVA5b320xbNvu30tqxVPXpld73bhtOeH6uAjRyHc=";
binaryCaches = ["https://nixbld.m-labs.hk" "https://cache.nixos.org"]; extra-substituters = "https://nixbld.m-labs.hk";
}; };
} }

nac3.svg (new file)

@ -0,0 +1,56 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
id="a"
width="128"
height="128"
viewBox="0 0 95.99999 95.99999"
version="1.1"
sodipodi:docname="nac3.svg"
inkscape:version="1.1.1 (3bf5ae0d25, 2021-09-20)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs11" />
<sodipodi:namedview
id="namedview9"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:document-units="mm"
showgrid="false"
units="px"
width="128px"
inkscape:zoom="5.9448568"
inkscape:cx="60.472441"
inkscape:cy="60.556547"
inkscape:window-width="2560"
inkscape:window-height="1371"
inkscape:window-x="0"
inkscape:window-y="32"
inkscape:window-maximized="1"
inkscape:current-layer="a" />
<rect
x="40.072601"
y="-26.776209"
width="55.668747"
height="55.668747"
transform="matrix(0.71803815,0.69600374,-0.71803815,0.69600374,0,0)"
style="fill:#be211e;stroke:#000000;stroke-width:4.37375px;stroke-linecap:round;stroke-linejoin:round"
id="rect2" />
<line
x1="38.00692"
y1="63.457153"
x2="57.993061"
y2="63.457153"
style="fill:none;stroke:#000000;stroke-width:4.37269px;stroke-linecap:round;stroke-linejoin:round"
id="line4" />
<path
d="m 48.007301,57.843329 c -1.943097,0 -3.877522,-0.41727 -5.686157,-1.246007 -3.218257,-1.474616 -5.650382,-4.075418 -6.849639,-7.323671 -2.065624,-5.588921 -1.192751,-10.226647 2.575258,-13.827 0.611554,-0.584909 1.518048,-0.773041 2.323689,-0.488206 0.80673,0.286405 1.369495,0.998486 1.447563,1.827234 0.237469,2.549302 2.439719,5.917376 4.28414,6.55273 0.396859,0.13506 0.820953,-0.05859 1.097084,-0.35222 0.339254,-0.360754 0.451065,-0.961893 -1.013597,-3.191372 -2.089851,-3.181137 -4.638728,-8.754903 -0.262407,-15.069853 0.494457,-0.713491 1.384673,-1.068907 2.256469,-0.909156 0.871795,0.161332 1.583757,0.806404 1.752251,1.651189 0.716448,3.591862 2.962357,6.151755 5.199306,8.023138 1.935503,1.61861 4.344688,3.867387 5.435687,7.096643 2.283183,6.758017 -1.202511,14.114988 -8.060822,16.494025 -1.467083,0.509226 -2.98513,0.762536 -4.498836,0.762536 z M 39.358865,40.002192 c -0.304711,0.696206 -0.541636,2.080524 -0.56865,2.237454 -0.330316,1.918771 0.168305,3.803963 0.846157,5.539951 0.856828,2.19436 2.437543,3.942467 4.583411,4.925713 2.143691,0.981675 4.554131,1.097816 6.789992,0.322666 4.571485,-1.586549 6.977584,-6.532238 5.363036,-11.02597 v -5.27e-4 C 55.455481,39.447968 54.023463,38.162043 52.221335,36.65432 50.876945,35.529534 49.409662,33.987726 48.417983,32.135555 48.01343,31.37996 47.79547,30.34303 47.76669,29.413263 c -0.187481,0.669514 -0.212441,2.325923 -0.150396,2.93691 0.179209,1.764456 1.333476,3.644546 2.340611,5.171243 1.311568,1.988179 2.72058,6.037272 0.459681,8.367985 -1.54192,1.58953 -4.038511,2.052034 -5.839973,1.38492 -2.398314,-0.888147 -3.942744,-2.690627 -4.941118,-4.768029 -0.121194,-0.25217 -0.532464,-1.174187 -0.276619,-2.5041 z"
id="path6"
style="stroke-width:1.09317" />
</svg>


nac3artiq/Cargo.toml

@ -16,9 +16,10 @@ nac3parser = { path = "../nac3parser" }
nac3core = { path = "../nac3core" } nac3core = { path = "../nac3core" }
[dependencies.inkwell] [dependencies.inkwell]
version = "0.1.0-beta.4" git = "https://github.com/nbaksalyar/inkwell.git"
branch = "llvm14"
default-features = false default-features = false
features = ["llvm13-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"] features = ["llvm14-0", "target-aarch64", "target-arm", "target-riscv", "no-libffi-linking"]
[features] [features]
init-llvm-profile = [] init-llvm-profile = []

nac3artiq/src/codegen.rs

@ -6,7 +6,7 @@ use nac3core::{
}, },
symbol_resolver::ValueEnum, symbol_resolver::ValueEnum,
toplevel::{DefinitionId, GenCall}, toplevel::{DefinitionId, GenCall},
typecheck::typedef::{FunSignature, FuncArg, Type, TypeEnum} typecheck::typedef::{FunSignature, FuncArg, Type, TypeEnum},
}; };
use nac3parser::ast::{Expr, ExprKind, Located, Stmt, StmtKind, StrRef}; use nac3parser::ast::{Expr, ExprKind, Located, Stmt, StmtKind, StrRef};
@ -15,7 +15,10 @@ use inkwell::{
context::Context, module::Linkage, types::IntType, values::BasicValueEnum, AddressSpace, context::Context, module::Linkage, types::IntType, values::BasicValueEnum, AddressSpace,
}; };
use pyo3::{PyObject, PyResult, Python, types::{PyDict, PyList}}; use pyo3::{
types::{PyDict, PyList},
PyObject, PyResult, Python,
};
use crate::{symbol_resolver::InnerResolver, timeline::TimeFns}; use crate::{symbol_resolver::InnerResolver, timeline::TimeFns};
@ -68,7 +71,11 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
) -> Result<Option<BasicValueEnum<'ctx>>, String> { ) -> Result<Option<BasicValueEnum<'ctx>>, String> {
let result = gen_call(self, ctx, obj, fun, params)?; let result = gen_call(self, ctx, obj, fun, params)?;
if let Some(end) = self.end.clone() { if let Some(end) = self.end.clone() {
let old_end = self.gen_expr(ctx, &end)?.unwrap().to_basic_value_enum(ctx, self, end.custom.unwrap())?; let old_end = self.gen_expr(ctx, &end)?.unwrap().to_basic_value_enum(
ctx,
self,
end.custom.unwrap(),
)?;
let now = self.timeline.emit_now_mu(ctx); let now = self.timeline.emit_now_mu(ctx);
let smax = ctx.module.get_function("llvm.smax.i64").unwrap_or_else(|| { let smax = ctx.module.get_function("llvm.smax.i64").unwrap_or_else(|| {
let i64 = ctx.ctx.i64_type(); let i64 = ctx.ctx.i64_type();
@ -88,7 +95,11 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
ctx.builder.build_store(end_store, max); ctx.builder.build_store(end_store, max);
} }
if let Some(start) = self.start.clone() { if let Some(start) = self.start.clone() {
let start_val = self.gen_expr(ctx, &start)?.unwrap().to_basic_value_enum(ctx, self, start.custom.unwrap())?; let start_val = self.gen_expr(ctx, &start)?.unwrap().to_basic_value_enum(
ctx,
self,
start.custom.unwrap(),
)?;
self.timeline.emit_at_mu(ctx, start_val); self.timeline.emit_at_mu(ctx, start_val);
} }
Ok(result) Ok(result)
@ -120,7 +131,11 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
let old_start = self.start.take(); let old_start = self.start.take();
let old_end = self.end.take(); let old_end = self.end.take();
let now = if let Some(old_start) = &old_start { let now = if let Some(old_start) = &old_start {
self.gen_expr(ctx, old_start)?.unwrap().to_basic_value_enum(ctx, self, old_start.custom.unwrap())? self.gen_expr(ctx, old_start)?.unwrap().to_basic_value_enum(
ctx,
self,
old_start.custom.unwrap(),
)?
} else { } else {
self.timeline.emit_now_mu(ctx) self.timeline.emit_now_mu(ctx)
}; };
@ -174,10 +189,11 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
}; };
// set duration // set duration
let end_expr = self.end.take().unwrap(); let end_expr = self.end.take().unwrap();
let end_val = self let end_val = self.gen_expr(ctx, &end_expr)?.unwrap().to_basic_value_enum(
.gen_expr(ctx, &end_expr)? ctx,
.unwrap() self,
.to_basic_value_enum(ctx, self, end_expr.custom.unwrap())?; end_expr.custom.unwrap(),
)?;
// inside a sequential block // inside a sequential block
if old_start.is_none() { if old_start.is_none() {
@ -293,7 +309,7 @@ fn rpc_codegen_callback_fn<'ctx, 'a>(
let int32 = ctx.ctx.i32_type(); let int32 = ctx.ctx.i32_type();
let tag_ptr_type = ctx.ctx.struct_type(&[ptr_type.into(), size_type.into()], false); let tag_ptr_type = ctx.ctx.struct_type(&[ptr_type.into(), size_type.into()], false);
let service_id = int32.const_int(fun.1.0 as u64, false); let service_id = int32.const_int(fun.1 .0 as u64, false);
// -- setup rpc tags // -- setup rpc tags
let mut tag = Vec::new(); let mut tag = Vec::new();
if obj.is_some() { if obj.is_some() {
@ -363,10 +379,8 @@ fn rpc_codegen_callback_fn<'ctx, 'a>(
} }
// default value handling // default value handling
for k in keys.into_iter() { for k in keys.into_iter() {
mapping.insert( mapping
k.name, .insert(k.name, ctx.gen_symbol_val(generator, &k.default_value.unwrap(), k.ty).into());
ctx.gen_symbol_val(generator, &k.default_value.unwrap(), k.ty).into()
);
} }
// reorder the parameters // reorder the parameters
let mut real_params = fun let mut real_params = fun
@ -507,9 +521,15 @@ pub fn attributes_writeback<'ctx, 'a>(
let mut scratch_buffer = Vec::new(); let mut scratch_buffer = Vec::new();
for (_, val) in globals.iter() { for (_, val) in globals.iter() {
let val = val.as_ref(py); let val = val.as_ref(py);
let ty = inner_resolver.get_obj_type(py, val, &mut ctx.unifier, &top_levels, &ctx.primitives)?; let ty = inner_resolver.get_obj_type(
py,
val,
&mut ctx.unifier,
&top_levels,
&ctx.primitives,
)?;
if let Err(ty) = ty { if let Err(ty) = ty {
return Ok(Err(ty)) return Ok(Err(ty));
} }
let ty = ty.unwrap(); let ty = ty.unwrap();
match &*ctx.unifier.get_ty(ty) { match &*ctx.unifier.get_ty(ty) {
@ -522,14 +542,18 @@ pub fn attributes_writeback<'ctx, 'a>(
let obj = inner_resolver.get_obj_value(py, val, ctx, generator, ty)?.unwrap(); let obj = inner_resolver.get_obj_value(py, val, ctx, generator, ty)?.unwrap();
for (name, (field_ty, is_mutable)) in fields.iter() { for (name, (field_ty, is_mutable)) in fields.iter() {
if !is_mutable { if !is_mutable {
continue continue;
} }
if gen_rpc_tag(ctx, *field_ty, &mut scratch_buffer).is_ok() { if gen_rpc_tag(ctx, *field_ty, &mut scratch_buffer).is_ok() {
attributes.push(name.to_string()); attributes.push(name.to_string());
let index = ctx.get_attr_index(ty, *name); let index = ctx.get_attr_index(ty, *name);
values.push((*field_ty, ctx.build_gep_and_load( values.push((
*field_ty,
ctx.build_gep_and_load(
obj.into_pointer_value(), obj.into_pointer_value(),
&[zero, int32.const_int(index as u64, false)]))); &[zero, int32.const_int(index as u64, false)],
),
));
} }
} }
if !attributes.is_empty() { if !attributes.is_empty() {
@ -538,33 +562,43 @@ pub fn attributes_writeback<'ctx, 'a>(
pydict.set_item("fields", attributes)?; pydict.set_item("fields", attributes)?;
host_attributes.append(pydict)?; host_attributes.append(pydict)?;
} }
}, }
TypeEnum::TList { ty: elem_ty } => { TypeEnum::TList { ty: elem_ty } => {
if gen_rpc_tag(ctx, *elem_ty, &mut scratch_buffer).is_ok() { if gen_rpc_tag(ctx, *elem_ty, &mut scratch_buffer).is_ok() {
let pydict = PyDict::new(py); let pydict = PyDict::new(py);
pydict.set_item("obj", val)?; pydict.set_item("obj", val)?;
host_attributes.append(pydict)?; host_attributes.append(pydict)?;
values.push((ty, inner_resolver.get_obj_value(py, val, ctx, generator, ty)?.unwrap())); values.push((
ty,
inner_resolver.get_obj_value(py, val, ctx, generator, ty)?.unwrap(),
));
}
} }
},
_ => {} _ => {}
} }
} }
let fun = FunSignature { let fun = FunSignature {
args: values.iter().enumerate().map(|(i, (ty, _))| FuncArg { args: values
.iter()
.enumerate()
.map(|(i, (ty, _))| FuncArg {
name: i.to_string().into(), name: i.to_string().into(),
ty: *ty, ty: *ty,
default_value: None default_value: None,
}).collect(), })
.collect(),
ret: ctx.primitives.none, ret: ctx.primitives.none,
vars: Default::default() vars: Default::default(),
}; };
let args: Vec<_> = values.into_iter().map(|(_, val)| (None, ValueEnum::Dynamic(val))).collect(); let args: Vec<_> =
if let Err(e) = rpc_codegen_callback_fn(ctx, None, (&fun, DefinitionId(0)), args, generator) { values.into_iter().map(|(_, val)| (None, ValueEnum::Dynamic(val))).collect();
if let Err(e) = rpc_codegen_callback_fn(ctx, None, (&fun, DefinitionId(0)), args, generator)
{
return Ok(Err(e)); return Ok(Err(e));
} }
Ok(Ok(())) Ok(Ok(()))
}).unwrap()?; })
.unwrap()?;
Ok(()) Ok(())
} }

nac3artiq/src/lib.rs

@ -17,9 +17,9 @@ use nac3parser::{
ast::{self, ExprKind, Stmt, StmtKind, StrRef}, ast::{self, ExprKind, Stmt, StmtKind, StrRef},
parser::{self, parse_program}, parser::{self, parse_program},
}; };
use pyo3::create_exception;
use pyo3::prelude::*; use pyo3::prelude::*;
use pyo3::{exceptions, types::PyBytes, types::PyDict, types::PySet}; use pyo3::{exceptions, types::PyBytes, types::PyDict, types::PySet};
use pyo3::create_exception;
use parking_lot::{Mutex, RwLock}; use parking_lot::{Mutex, RwLock};
@ -40,7 +40,7 @@ use tempfile::{self, TempDir};
use crate::codegen::attributes_writeback; use crate::codegen::attributes_writeback;
use crate::{ use crate::{
codegen::{rpc_codegen_callback, ArtiqCodeGenerator}, codegen::{rpc_codegen_callback, ArtiqCodeGenerator},
symbol_resolver::{InnerResolver, PythonHelper, Resolver, DeferredEvaluationStore}, symbol_resolver::{DeferredEvaluationStore, InnerResolver, PythonHelper, Resolver},
}; };
mod codegen; mod codegen;
@ -93,7 +93,7 @@ struct Nac3 {
top_levels: Vec<TopLevelComponent>, top_levels: Vec<TopLevelComponent>,
string_store: Arc<RwLock<HashMap<String, i32>>>, string_store: Arc<RwLock<HashMap<String, i32>>>,
exception_ids: Arc<RwLock<HashMap<usize, usize>>>, exception_ids: Arc<RwLock<HashMap<usize, usize>>>,
deferred_eval_store: DeferredEvaluationStore deferred_eval_store: DeferredEvaluationStore,
} }
create_exception!(nac3artiq, CompileError, exceptions::PyException); create_exception!(nac3artiq, CompileError, exceptions::PyException);
@ -268,7 +268,7 @@ fn add_exceptions(
composer: &mut TopLevelComposer, composer: &mut TopLevelComposer,
builtin_def: &mut HashMap<StrRef, DefinitionId>, builtin_def: &mut HashMap<StrRef, DefinitionId>,
builtin_ty: &mut HashMap<StrRef, Type>, builtin_ty: &mut HashMap<StrRef, Type>,
error_names: &[&str] error_names: &[&str],
) -> Vec<Type> { ) -> Vec<Type> {
let mut types = Vec::new(); let mut types = Vec::new();
// note: this is only for builtin exceptions, i.e. the exception name is "0:{exn}" // note: this is only for builtin exceptions, i.e. the exception name is "0:{exn}"
@ -281,7 +281,7 @@ fn add_exceptions(
// constructor id // constructor id
def_id + 1, def_id + 1,
&mut composer.unifier, &mut composer.unifier,
&composer.primitives_ty &composer.primitives_ty,
); );
composer.definition_ast_list.push((Arc::new(RwLock::new(exception_class)), None)); composer.definition_ast_list.push((Arc::new(RwLock::new(exception_class)), None));
composer.definition_ast_list.push((Arc::new(RwLock::new(exception_fn)), None)); composer.definition_ast_list.push((Arc::new(RwLock::new(exception_fn)), None));
@ -331,7 +331,8 @@ impl Nac3 {
}, },
Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| { Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| {
let arg_ty = fun.0.args[0].ty; let arg_ty = fun.0.args[0].ty;
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap(); let arg =
args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap();
time_fns.emit_at_mu(ctx, arg); time_fns.emit_at_mu(ctx, arg);
Ok(None) Ok(None)
}))), }))),
@ -349,7 +350,8 @@ impl Nac3 {
}, },
Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| { Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| {
let arg_ty = fun.0.args[0].ty; let arg_ty = fun.0.args[0].ty;
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap(); let arg =
args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap();
time_fns.emit_delay_mu(ctx, arg); time_fns.emit_delay_mu(ctx, arg);
Ok(None) Ok(None)
}))), }))),
@ -363,8 +365,9 @@ impl Nac3 {
let types_mod = PyModule::import(py, "types").unwrap(); let types_mod = PyModule::import(py, "types").unwrap();
let get_id = |x| id_fn.call1((x,)).unwrap().extract().unwrap(); let get_id = |x| id_fn.call1((x,)).unwrap().extract().unwrap();
let get_attr_id = |obj: &PyModule, attr| id_fn.call1((obj.getattr(attr).unwrap(),)) let get_attr_id = |obj: &PyModule, attr| {
.unwrap().extract().unwrap(); id_fn.call1((obj.getattr(attr).unwrap(),)).unwrap().extract().unwrap()
};
let primitive_ids = PrimitivePythonId { let primitive_ids = PrimitivePythonId {
virtual_id: get_id( virtual_id: get_id(
builtins_mod builtins_mod
@ -373,8 +376,8 @@ impl Nac3 {
.call0() .call0()
.unwrap() .unwrap()
.get_item("virtual") .get_item("virtual")
.unwrap( .unwrap(),
)), ),
generic_alias: ( generic_alias: (
get_attr_id(typing_mod, "_GenericAlias"), get_attr_id(typing_mod, "_GenericAlias"),
get_attr_id(types_mod, "GenericAlias"), get_attr_id(types_mod, "GenericAlias"),
@ -519,8 +522,9 @@ impl Nac3 {
let class_obj; let class_obj;
if let StmtKind::ClassDef { name, .. } = &stmt.node { if let StmtKind::ClassDef { name, .. } = &stmt.node {
let class = py_module.getattr(name.to_string()).unwrap(); let class = py_module.getattr(name.to_string()).unwrap();
if issubclass.call1((class, exn_class)).unwrap().extract().unwrap() && if issubclass.call1((class, exn_class)).unwrap().extract().unwrap()
class.getattr("artiq_builtin").is_err() { && class.getattr("artiq_builtin").is_err()
{
class_obj = Some(class); class_obj = Some(class);
} else { } else {
class_obj = None; class_obj = None;
@ -566,13 +570,12 @@ impl Nac3 {
let (name, def_id, ty) = composer let (name, def_id, ty) = composer
.register_top_level(stmt.clone(), Some(resolver.clone()), path.clone()) .register_top_level(stmt.clone(), Some(resolver.clone()), path.clone())
.map_err(|e| { .map_err(|e| {
CompileError::new_err(format!( CompileError::new_err(format!("compilation failed\n----------\n{}", e))
"compilation failed\n----------\n{}",
e
))
})?; })?;
if let Some(class_obj) = class_obj { if let Some(class_obj) = class_obj {
self.exception_ids.write().insert(def_id.0, store_obj.call1(py, (class_obj, ))?.extract(py)?); self.exception_ids
.write()
.insert(def_id.0, store_obj.call1(py, (class_obj,))?.extract(py)?);
} }
match &stmt.node { match &stmt.node {
@ -642,7 +645,8 @@ impl Nac3 {
exception_ids: self.exception_ids.clone(), exception_ids: self.exception_ids.clone(),
deferred_eval_store: self.deferred_eval_store.clone(), deferred_eval_store: self.deferred_eval_store.clone(),
}); });
let resolver = Arc::new(Resolver(inner_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>; let resolver =
Arc::new(Resolver(inner_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;
let (_, def_id, _) = composer let (_, def_id, _) = composer
.register_top_level(synthesized.pop().unwrap(), Some(resolver.clone()), "".into()) .register_top_level(synthesized.pop().unwrap(), Some(resolver.clone()), "".into())
.unwrap(); .unwrap();
@ -651,8 +655,12 @@ impl Nac3 {
FunSignature { args: vec![], ret: self.primitive.none, vars: HashMap::new() }; FunSignature { args: vec![], ret: self.primitive.none, vars: HashMap::new() };
let mut store = ConcreteTypeStore::new(); let mut store = ConcreteTypeStore::new();
let mut cache = HashMap::new(); let mut cache = HashMap::new();
let signature = let signature = store.from_signature(
store.from_signature(&mut composer.unifier, &self.primitive, &fun_signature, &mut cache); &mut composer.unifier,
&self.primitive,
&fun_signature,
&mut cache,
);
let signature = store.add_cty(signature); let signature = store.add_cty(signature);
if let Err(e) = composer.start_analysis(true) { if let Err(e) = composer.start_analysis(true) {
@ -738,8 +746,12 @@ impl Nac3 {
let mut store = ConcreteTypeStore::new(); let mut store = ConcreteTypeStore::new();
let mut cache = HashMap::new(); let mut cache = HashMap::new();
let signature = let signature = store.from_signature(
store.from_signature(&mut composer.unifier, &self.primitive, &fun_signature, &mut cache); &mut composer.unifier,
&self.primitive,
&fun_signature,
&mut cache,
);
let signature = store.add_cty(signature); let signature = store.add_cty(signature);
let attributes_writeback_task = CodeGenTask { let attributes_writeback_task = CodeGenTask {
subst: Default::default(), subst: Default::default(),
@ -777,14 +789,23 @@ impl Nac3 {
registry.add_task(task); registry.add_task(task);
registry.wait_tasks_complete(handles); registry.wait_tasks_complete(handles);
let mut generator = ArtiqCodeGenerator::new("attributes_writeback".to_string(), size_t, self.time_fns); let mut generator =
ArtiqCodeGenerator::new("attributes_writeback".to_string(), size_t, self.time_fns);
let context = inkwell::context::Context::create(); let context = inkwell::context::Context::create();
let module = context.create_module("attributes_writeback"); let module = context.create_module("attributes_writeback");
let builder = context.create_builder(); let builder = context.create_builder();
let (_, module, _) = gen_func_impl(&context, &mut generator, &registry, builder, module, let (_, module, _) = gen_func_impl(
attributes_writeback_task, |generator, ctx| { &context,
&mut generator,
&registry,
builder,
module,
attributes_writeback_task,
|generator, ctx| {
attributes_writeback(ctx, generator, inner_resolver.as_ref(), host_attributes) attributes_writeback(ctx, generator, inner_resolver.as_ref(), host_attributes)
}).unwrap(); },
)
.unwrap();
let buffer = module.write_bitcode_to_memory(); let buffer = module.write_bitcode_to_memory();
let buffer = buffer.as_slice().into(); let buffer = buffer.as_slice().into();
membuffer.lock().push(buffer); membuffer.lock().push(buffer);
@ -800,13 +821,22 @@ impl Nac3 {
.create_module_from_ir(MemoryBuffer::create_from_memory_range(buffer, "main")) .create_module_from_ir(MemoryBuffer::create_from_memory_range(buffer, "main"))
.unwrap(); .unwrap();
main.link_in_module(other) main.link_in_module(other).map_err(|err| CompileError::new_err(err.to_string()))?;
.map_err(|err| CompileError::new_err(err.to_string()))?;
} }
let builder = context.create_builder(); let builder = context.create_builder();
let modinit_return = main.get_function("__modinit__").unwrap().get_last_basic_block().unwrap().get_terminator().unwrap(); let modinit_return = main
.get_function("__modinit__")
.unwrap()
.get_last_basic_block()
.unwrap()
.get_terminator()
.unwrap();
builder.position_before(&modinit_return); builder.position_before(&modinit_return);
builder.build_call(main.get_function("attributes_writeback").unwrap(), &[], "attributes_writeback"); builder.build_call(
main.get_function("attributes_writeback").unwrap(),
&[],
"attributes_writeback",
);
main.link_in_module(load_irrt(&context)) main.link_in_module(load_irrt(&context))
.map_err(|err| CompileError::new_err(err.to_string()))?; .map_err(|err| CompileError::new_err(err.to_string()))?;
@ -880,9 +910,7 @@ impl Nac3 {
return Err(CompileError::new_err("failed to start linker")); return Err(CompileError::new_err("failed to start linker"));
} }
} else { } else {
return Err(CompileError::new_err( return Err(CompileError::new_err("linker returned non-zero status code"));
"linker returned non-zero status code",
));
} }
Ok(()) Ok(())

nac3artiq/src/symbol_resolver.rs

@ -17,9 +17,9 @@ use pyo3::{
use std::{ use std::{
collections::HashMap, collections::HashMap,
sync::{ sync::{
atomic::{AtomicBool, Ordering::Relaxed},
Arc, Arc,
atomic::{AtomicBool, Ordering::Relaxed} },
}
}; };
use crate::PrimitivePythonId; use crate::PrimitivePythonId;
@ -153,7 +153,8 @@ impl StaticValue for PythonValue {
self.resolver self.resolver
.get_obj_value(py, self.value.as_ref(py), ctx, generator, expected_ty) .get_obj_value(py, self.value.as_ref(py), ctx, generator, expected_ty)
.map(Option::unwrap) .map(Option::unwrap)
}).map_err(|e| e.to_string()) })
.map_err(|e| e.to_string())
} }
fn get_field<'ctx, 'a>( fn get_field<'ctx, 'a>(
@ -175,9 +176,9 @@ impl StaticValue for PythonValue {
let obj = self.value.getattr(py, &name.to_string())?; let obj = self.value.getattr(py, &name.to_string())?;
let id = self.resolver.helper.id_fn.call1(py, (&obj,))?.extract(py)?; let id = self.resolver.helper.id_fn.call1(py, (&obj,))?.extract(py)?;
if self.id == self.resolver.primitive_ids.none { if self.id == self.resolver.primitive_ids.none {
return Ok(None) return Ok(None);
} else { } else {
return Ok(Some((id, obj))) return Ok(Some((id, obj)));
} }
} }
let def_id = { *self.resolver.pyid_to_def.read().get(&ty_id).unwrap() }; let def_id = { *self.resolver.pyid_to_def.read().get(&ty_id).unwrap() };
@ -384,10 +385,10 @@ impl InnerResolver {
} }
} }
if needs_defer { if needs_defer {
self.deferred_eval_store.store.write() self.deferred_eval_store.store.write().push((
.push((result.clone(), result.clone(),
constraints.extract()?, constraints.extract()?,
pyty.getattr("__name__")?.extract::<String>()? pyty.getattr("__name__")?.extract::<String>()?,
)) ))
} }
result result
@ -531,10 +532,7 @@ impl InnerResolver {
let str_fn = let str_fn =
pyo3::types::PyModule::import(py, "builtins").unwrap().getattr("repr").unwrap(); pyo3::types::PyModule::import(py, "builtins").unwrap().getattr("repr").unwrap();
let str_repr: String = str_fn.call1((pyty,)).unwrap().extract().unwrap(); let str_repr: String = str_fn.call1((pyty,)).unwrap().extract().unwrap();
Ok(Err(format!( Ok(Err(format!("{} is not registered with NAC3 (@nac3 decorator missing?)", str_repr)))
"{} is not registered with NAC3 (@nac3 decorator missing?)",
str_repr
)))
} }
} }
@ -549,7 +547,7 @@ impl InnerResolver {
let ty = self.helper.type_fn.call1(py, (obj,)).unwrap(); let ty = self.helper.type_fn.call1(py, (obj,)).unwrap();
let py_obj_id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?; let py_obj_id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
if let Some(ty) = self.pyid_to_type.read().get(&py_obj_id) { if let Some(ty) = self.pyid_to_type.read().get(&py_obj_id) {
return Ok(Ok(*ty)) return Ok(Ok(*ty));
} }
let (extracted_ty, inst_check) = match self.get_pyty_obj_type( let (extracted_ty, inst_check) = match self.get_pyty_obj_type(
py, py,
@ -616,7 +614,7 @@ impl InnerResolver {
let field_data = match obj.getattr("_nac3_option") { let field_data = match obj.getattr("_nac3_option") {
Ok(d) => d, Ok(d) => d,
// we use `none = Option(None)`, so the obj always have attr `_nac3_option` // we use `none = Option(None)`, so the obj always have attr `_nac3_option`
Err(_) => unreachable!("cannot be None") Err(_) => unreachable!("cannot be None"),
}; };
// if is `none` // if is `none`
let zelf_id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?; let zelf_id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
@ -627,7 +625,9 @@ impl InnerResolver {
let var_map = params let var_map = params
.iter() .iter()
.map(|(id_var, ty)| { .map(|(id_var, ty)| {
if let TypeEnum::TVar { id, range, name, loc, .. } = &*unifier.get_ty(*ty) { if let TypeEnum::TVar { id, range, name, loc, .. } =
&*unifier.get_ty(*ty)
{
assert_eq!(*id, *id_var); assert_eq!(*id, *id_var);
(*id, unifier.get_fresh_var_with_range(range, *name, *loc).0) (*id, unifier.get_fresh_var_with_range(range, *name, *loc).0)
} else { } else {
@ -635,7 +635,7 @@ impl InnerResolver {
} }
}) })
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();
return Ok(Ok(unifier.subst(primitives.option, &var_map).unwrap())) return Ok(Ok(unifier.subst(primitives.option, &var_map).unwrap()));
} else { } else {
unreachable!("must be tobj") unreachable!("must be tobj")
} }
@ -659,9 +659,7 @@ impl InnerResolver {
let var_map = params let var_map = params
.iter() .iter()
.map(|(id_var, ty)| { .map(|(id_var, ty)| {
if let TypeEnum::TVar { id, range, name, loc, .. } = if let TypeEnum::TVar { id, range, name, loc, .. } = &*unifier.get_ty(*ty) {
&*unifier.get_ty(*ty)
{
assert_eq!(*id, *id_var); assert_eq!(*id, *id_var);
(*id, unifier.get_fresh_var_with_range(range, *name, *loc).0) (*id, unifier.get_fresh_var_with_range(range, *name, *loc).0)
} else { } else {
@ -673,7 +671,7 @@ impl InnerResolver {
// loop through non-function fields of the class to get the instantiated value // loop through non-function fields of the class to get the instantiated value
for field in fields.iter() { for field in fields.iter() {
let name: String = (*field.0).into(); let name: String = (*field.0).into();
if let TypeEnum::TFunc(..) = &*unifier.get_ty(field.1.0) { if let TypeEnum::TFunc(..) = &*unifier.get_ty(field.1 .0) {
continue; continue;
} else { } else {
let field_data = obj.getattr(&name)?; let field_data = obj.getattr(&name)?;
@ -689,7 +687,7 @@ impl InnerResolver {
} }
}; };
let field_ty = let field_ty =
unifier.subst(field.1.0, &var_map).unwrap_or(field.1.0); unifier.subst(field.1 .0, &var_map).unwrap_or(field.1 .0);
if let Err(e) = unifier.unify(ty, field_ty) { if let Err(e) = unifier.unify(ty, field_ty) {
// field type mismatch // field type mismatch
return Ok(Err(format!( return Ok(Err(format!(
@ -706,18 +704,54 @@ impl InnerResolver {
return Ok(Err("object is not of concrete type".into())); return Ok(Err("object is not of concrete type".into()));
} }
} }
let extracted_ty = unifier.subst(extracted_ty, &var_map).unwrap_or(extracted_ty); let extracted_ty =
unifier.subst(extracted_ty, &var_map).unwrap_or(extracted_ty);
Ok(Ok(extracted_ty)) Ok(Ok(extracted_ty))
}; };
let result = instantiate_obj(); let result = instantiate_obj();
// update/remove the cache according to the result // update/remove the cache according to the result
match result { match result {
Ok(Ok(ty)) => self.pyid_to_type.write().insert(py_obj_id, ty), Ok(Ok(ty)) => self.pyid_to_type.write().insert(py_obj_id, ty),
_ => self.pyid_to_type.write().remove(&py_obj_id) _ => self.pyid_to_type.write().remove(&py_obj_id),
}; };
result result
} }
_ => Ok(Ok(extracted_ty)), _ => {
// check integer bounds
if unifier.unioned(extracted_ty, primitives.int32) {
obj.extract::<i32>().map_or_else(
|_| Ok(Err(format!("{} is not in the range of int32", obj))),
|_| Ok(Ok(extracted_ty)),
)
} else if unifier.unioned(extracted_ty, primitives.int64) {
obj.extract::<i64>().map_or_else(
|_| Ok(Err(format!("{} is not in the range of int64", obj))),
|_| Ok(Ok(extracted_ty)),
)
} else if unifier.unioned(extracted_ty, primitives.uint32) {
obj.extract::<u32>().map_or_else(
|_| Ok(Err(format!("{} is not in the range of uint32", obj))),
|_| Ok(Ok(extracted_ty)),
)
} else if unifier.unioned(extracted_ty, primitives.uint64) {
obj.extract::<u64>().map_or_else(
|_| Ok(Err(format!("{} is not in the range of uint64", obj))),
|_| Ok(Ok(extracted_ty)),
)
} else if unifier.unioned(extracted_ty, primitives.bool) {
obj.extract::<bool>().map_or_else(
|_| Ok(Err(format!("{} is not in the range of bool", obj))),
|_| Ok(Ok(extracted_ty)),
)
} else if unifier.unioned(extracted_ty, primitives.float) {
obj.extract::<f64>().map_or_else(
|_| Ok(Err(format!("{} is not in the range of float64", obj))),
|_| Ok(Ok(extracted_ty)),
)
} else {
Ok(Ok(extracted_ty))
}
}
} }
} }
@ -733,33 +767,27 @@ impl InnerResolver {
self.helper.id_fn.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?.extract(py)?; self.helper.id_fn.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?.extract(py)?;
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?; let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 { if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
let val: i32 = obj.extract().map_err(|_| super::CompileError::new_err( let val: i32 = obj.extract().unwrap();
format!("{} is not in the range of int32", obj)))?;
self.id_to_primitive.write().insert(id, PrimitiveValue::I32(val)); self.id_to_primitive.write().insert(id, PrimitiveValue::I32(val));
Ok(Some(ctx.ctx.i32_type().const_int(val as u64, false).into())) Ok(Some(ctx.ctx.i32_type().const_int(val as u64, false).into()))
} else if ty_id == self.primitive_ids.int64 { } else if ty_id == self.primitive_ids.int64 {
let val: i64 = obj.extract().map_err(|_| super::CompileError::new_err( let val: i64 = obj.extract().unwrap();
format!("{} is not in the range of int64", obj)))?;
self.id_to_primitive.write().insert(id, PrimitiveValue::I64(val)); self.id_to_primitive.write().insert(id, PrimitiveValue::I64(val));
Ok(Some(ctx.ctx.i64_type().const_int(val as u64, false).into())) Ok(Some(ctx.ctx.i64_type().const_int(val as u64, false).into()))
} else if ty_id == self.primitive_ids.uint32 { } else if ty_id == self.primitive_ids.uint32 {
let val: u32 = obj.extract().map_err(|_| super::CompileError::new_err( let val: u32 = obj.extract().unwrap();
format!("{} is not in the range of uint32", obj)))?;
self.id_to_primitive.write().insert(id, PrimitiveValue::U32(val)); self.id_to_primitive.write().insert(id, PrimitiveValue::U32(val));
Ok(Some(ctx.ctx.i32_type().const_int(val as u64, false).into())) Ok(Some(ctx.ctx.i32_type().const_int(val as u64, false).into()))
} else if ty_id == self.primitive_ids.uint64 { } else if ty_id == self.primitive_ids.uint64 {
let val: u64 = obj.extract().map_err(|_| super::CompileError::new_err( let val: u64 = obj.extract().unwrap();
format!("{} is not in the range of uint64", obj)))?;
self.id_to_primitive.write().insert(id, PrimitiveValue::U64(val)); self.id_to_primitive.write().insert(id, PrimitiveValue::U64(val));
Ok(Some(ctx.ctx.i64_type().const_int(val, false).into())) Ok(Some(ctx.ctx.i64_type().const_int(val, false).into()))
} else if ty_id == self.primitive_ids.bool { } else if ty_id == self.primitive_ids.bool {
let val: bool = obj.extract().map_err(|_| super::CompileError::new_err( let val: bool = obj.extract().unwrap();
format!("{} is not in the range of bool", obj)))?;
self.id_to_primitive.write().insert(id, PrimitiveValue::Bool(val)); self.id_to_primitive.write().insert(id, PrimitiveValue::Bool(val));
Ok(Some(ctx.ctx.bool_type().const_int(val as u64, false).into())) Ok(Some(ctx.ctx.bool_type().const_int(val as u64, false).into()))
} else if ty_id == self.primitive_ids.float || ty_id == self.primitive_ids.float64 { } else if ty_id == self.primitive_ids.float || ty_id == self.primitive_ids.float64 {
let val: f64 = obj.extract().map_err(|_| super::CompileError::new_err( let val: f64 = obj.extract().unwrap();
format!("{} is not in the range of float64", obj)))?;
self.id_to_primitive.write().insert(id, PrimitiveValue::F64(val)); self.id_to_primitive.write().insert(id, PrimitiveValue::F64(val));
Ok(Some(ctx.ctx.f64_type().const_float(val).into())) Ok(Some(ctx.ctx.f64_type().const_float(val).into()))
} else if ty_id == self.primitive_ids.list { } else if ty_id == self.primitive_ids.list {
@ -770,8 +798,8 @@ impl InnerResolver {
} }
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?; let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
let elem_ty = let elem_ty = if let TypeEnum::TList { ty } =
if let TypeEnum::TList { ty } = ctx.unifier.get_ty_immutable(expected_ty).as_ref() ctx.unifier.get_ty_immutable(expected_ty).as_ref()
{ {
*ty *ty
} else { } else {
@ -796,14 +824,15 @@ impl InnerResolver {
let arr: Result<Option<Vec<_>>, _> = (0..len) let arr: Result<Option<Vec<_>>, _> = (0..len)
.map(|i| { .map(|i| {
obj obj.get_item(i).and_then(|elem| {
.get_item(i) self.get_obj_value(py, elem, ctx, generator, elem_ty).map_err(|e| {
.and_then(|elem| self.get_obj_value(py, elem, ctx, generator, elem_ty) super::CompileError::new_err(format!(
.map_err( "Error getting element {}: {}",
|e| super::CompileError::new_err( i, e
format!("Error getting element {}: {}", i, e))
)) ))
}) })
})
})
.collect(); .collect();
let arr = arr?.unwrap(); let arr = arr?.unwrap();
@ -847,18 +876,19 @@ impl InnerResolver {
let tup_tys = ty.iter(); let tup_tys = ty.iter();
let elements: &PyTuple = obj.cast_as()?; let elements: &PyTuple = obj.cast_as()?;
assert_eq!(elements.len(), tup_tys.len()); assert_eq!(elements.len(), tup_tys.len());
let val: Result<Option<Vec<_>>, _> = let val: Result<Option<Vec<_>>, _> = elements
elements
.iter() .iter()
.enumerate() .enumerate()
.zip(tup_tys) .zip(tup_tys)
.map(|((i, elem), ty)| self .map(|((i, elem), ty)| {
.get_obj_value(py, elem, ctx, generator, *ty).map_err(|e| self.get_obj_value(py, elem, ctx, generator, *ty).map_err(|e| {
super::CompileError::new_err( super::CompileError::new_err(format!(
format!("Error getting element {}: {}", i, e) "Error getting element {}: {}",
) i, e
) ))
).collect(); })
})
.collect();
let val = val?.unwrap(); let val = val?.unwrap();
let val = ctx.ctx.const_struct(&val, false); let val = ctx.ctx.const_struct(&val, false);
Ok(Some(val.into())) Ok(Some(val.into()))
@ -872,7 +902,7 @@ impl InnerResolver {
{ {
*params.iter().next().unwrap().1 *params.iter().next().unwrap().1
} }
_ => unreachable!("must be option type") _ => unreachable!("must be option type"),
}; };
if id == self.primitive_ids.none { if id == self.primitive_ids.none {
// for option type, just a null ptr // for option type, just a null ptr
@ -884,7 +914,13 @@ impl InnerResolver {
)) ))
} else { } else {
match self match self
.get_obj_value(py, obj.getattr("_nac3_option").unwrap(), ctx, generator, option_val_ty) .get_obj_value(
py,
obj.getattr("_nac3_option").unwrap(),
ctx,
generator,
option_val_ty,
)
.map_err(|e| { .map_err(|e| {
super::CompileError::new_err(format!( super::CompileError::new_err(format!(
"Error getting value of Option object: {}", "Error getting value of Option object: {}",
@ -895,18 +931,27 @@ impl InnerResolver {
let global_str = format!("{}_option", id); let global_str = format!("{}_option", id);
{ {
if self.global_value_ids.read().contains_key(&id) { if self.global_value_ids.read().contains_key(&id) {
let global = ctx.module.get_global(&global_str).unwrap_or_else(|| { let global =
ctx.module.add_global(v.get_type(), Some(AddressSpace::Generic), &global_str) ctx.module.get_global(&global_str).unwrap_or_else(|| {
ctx.module.add_global(
v.get_type(),
Some(AddressSpace::Generic),
&global_str,
)
}); });
return Ok(Some(global.as_pointer_value().into())); return Ok(Some(global.as_pointer_value().into()));
} else { } else {
self.global_value_ids.write().insert(id, obj.into()); self.global_value_ids.write().insert(id, obj.into());
} }
} }
let global = ctx.module.add_global(v.get_type(), Some(AddressSpace::Generic), &global_str); let global = ctx.module.add_global(
v.get_type(),
Some(AddressSpace::Generic),
&global_str,
);
global.set_initializer(&v); global.set_initializer(&v);
Ok(Some(global.as_pointer_value().into())) Ok(Some(global.as_pointer_value().into()))
}, }
None => Ok(None), None => Ok(None),
} }
} }
@ -944,7 +989,12 @@ impl InnerResolver {
.iter() .iter()
.map(|(name, ty, _)| { .map(|(name, ty, _)| {
self.get_obj_value(py, obj.getattr(&name.to_string())?, ctx, generator, *ty) self.get_obj_value(py, obj.getattr(&name.to_string())?, ctx, generator, *ty)
.map_err(|e| super::CompileError::new_err(format!("Error getting field {}: {}", name, e))) .map_err(|e| {
super::CompileError::new_err(format!(
"Error getting field {}: {}",
name, e
))
})
}) })
.collect(); .collect();
let values = values?; let values = values?;
@ -999,8 +1049,7 @@ impl InnerResolver {
if id == self.primitive_ids.none { if id == self.primitive_ids.none {
Ok(SymbolValue::OptionNone) Ok(SymbolValue::OptionNone)
} else { } else {
self self.get_default_param_obj_value(py, obj.getattr("_nac3_option").unwrap())?
.get_default_param_obj_value(py, obj.getattr("_nac3_option").unwrap())?
.map(|v| SymbolValue::OptionSome(Box::new(v))) .map(|v| SymbolValue::OptionSome(Box::new(v)))
} }
} else { } else {
@ -1155,7 +1204,7 @@ impl SymbolResolver for Resolver {
&self, &self,
unifier: &mut Unifier, unifier: &mut Unifier,
defs: &[Arc<RwLock<TopLevelDef>>], defs: &[Arc<RwLock<TopLevelDef>>],
primitives: &PrimitiveStore primitives: &PrimitiveStore,
) -> Result<(), String> { ) -> Result<(), String> {
// we don't need a lock because this will only be run in a single thread // we don't need a lock because this will only be run in a single thread
if self.0.deferred_eval_store.needs_defer.load(Relaxed) { if self.0.deferred_eval_store.needs_defer.load(Relaxed) {
@ -1185,7 +1234,8 @@ impl SymbolResolver for Resolver {
} }
} }
Ok(Ok(())) Ok(Ok(()))
}).unwrap()? })
.unwrap()?
} }
Ok(()) Ok(())
} }
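
The bounds checks added to get_obj_type above, and removed from get_obj_value where they previously surfaced only as a CompileError during code generation, implement commit f14b32be67: host integers are validated against the target width while type checking. The check itself is a plain range test; a Python sketch of the int32 case, with an illustrative helper name that is not part of nac3artiq's API:

    # Equivalent of the obj.extract::<i32>() test in get_obj_type; out-of-range values
    # are reported as "<value> is not in the range of int32" instead of panicking later.
    def fits_int32(value: int) -> bool:
        return -2**31 <= value < 2**31

    assert fits_int32(2_147_483_647)         # largest int32, accepted
    assert not fits_int32(2_147_483_648)     # rejected during type checking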

File diff suppressed because it is too large

nac3ast/src/constant.rs

@ -85,14 +85,10 @@ impl<U> crate::fold::Fold<U> for ConstantOptimizer {
fn fold_expr(&mut self, node: crate::Expr<U>) -> Result<crate::Expr<U>, Self::Error> { fn fold_expr(&mut self, node: crate::Expr<U>) -> Result<crate::Expr<U>, Self::Error> {
match node.node { match node.node {
crate::ExprKind::Tuple { elts, ctx } => { crate::ExprKind::Tuple { elts, ctx } => {
let elts = elts let elts =
.into_iter() elts.into_iter().map(|x| self.fold_expr(x)).collect::<Result<Vec<_>, _>>()?;
.map(|x| self.fold_expr(x)) let expr =
.collect::<Result<Vec<_>, _>>()?; if elts.iter().all(|e| matches!(e.node, crate::ExprKind::Constant { .. })) {
let expr = if elts
.iter()
.all(|e| matches!(e.node, crate::ExprKind::Constant { .. }))
{
let tuple = elts let tuple = elts
.into_iter() .into_iter()
.map(|e| match e.node { .map(|e| match e.node {
@ -100,18 +96,11 @@ impl<U> crate::fold::Fold<U> for ConstantOptimizer {
_ => unreachable!(), _ => unreachable!(),
}) })
.collect(); .collect();
crate::ExprKind::Constant { crate::ExprKind::Constant { value: Constant::Tuple(tuple), kind: None }
value: Constant::Tuple(tuple),
kind: None,
}
} else { } else {
crate::ExprKind::Tuple { elts, ctx } crate::ExprKind::Tuple { elts, ctx }
}; };
Ok(crate::Expr { Ok(crate::Expr { node: expr, custom: node.custom, location: node.location })
node: expr,
custom: node.custom,
location: node.location,
})
} }
_ => crate::fold::fold_expr(self, node), _ => crate::fold::fold_expr(self, node),
} }
@ -138,18 +127,12 @@ mod tests {
Located { Located {
location, location,
custom, custom,
node: ExprKind::Constant { node: ExprKind::Constant { value: 1.into(), kind: None },
value: 1.into(),
kind: None,
},
}, },
Located { Located {
location, location,
custom, custom,
node: ExprKind::Constant { node: ExprKind::Constant { value: 2.into(), kind: None },
value: 2.into(),
kind: None,
},
}, },
Located { Located {
location, location,
@ -160,26 +143,17 @@ mod tests {
Located { Located {
location, location,
custom, custom,
node: ExprKind::Constant { node: ExprKind::Constant { value: 3.into(), kind: None },
value: 3.into(),
kind: None,
},
}, },
Located { Located {
location, location,
custom, custom,
node: ExprKind::Constant { node: ExprKind::Constant { value: 4.into(), kind: None },
value: 4.into(),
kind: None,
},
}, },
Located { Located {
location, location,
custom, custom,
node: ExprKind::Constant { node: ExprKind::Constant { value: 5.into(), kind: None },
value: 5.into(),
kind: None,
},
}, },
], ],
}, },
@ -187,9 +161,7 @@ mod tests {
], ],
}, },
}; };
let new_ast = ConstantOptimizer::new() let new_ast = ConstantOptimizer::new().fold_expr(ast).unwrap_or_else(|e| match e {});
.fold_expr(ast)
.unwrap_or_else(|e| match e {});
assert_eq!( assert_eq!(
new_ast, new_ast,
Located { Located {
@ -199,11 +171,7 @@ mod tests {
value: Constant::Tuple(vec![ value: Constant::Tuple(vec![
1.into(), 1.into(),
2.into(), 2.into(),
Constant::Tuple(vec![ Constant::Tuple(vec![3.into(), 4.into(), 5.into(),])
3.into(),
4.into(),
5.into(),
])
]), ]),
kind: None kind: None
}, },
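The reformatted test above still exercises the same folding rule: a tuple expression whose elements are all constants collapses into a single Constant::Tuple, nested tuples included. A rough, self-contained restatement of that rule, using toy Expr/Const types rather than the real nac3parser AST:

    // Toy stand-ins for the AST handled by ConstantOptimizer; the real types
    // also carry locations, custom data, and a ConversionFlag.
    #[derive(Debug, Clone, PartialEq)]
    enum Const {
        Int(i64),
        Tuple(Vec<Const>),
    }

    #[derive(Debug, Clone, PartialEq)]
    enum Expr {
        Constant(Const),
        Tuple(Vec<Expr>),
    }

    // Fold tuples bottom-up; if every element ends up constant, replace the
    // tuple expression with one constant tuple value.
    fn fold(expr: Expr) -> Expr {
        match expr {
            Expr::Tuple(elts) => {
                let elts: Vec<Expr> = elts.into_iter().map(fold).collect();
                if elts.iter().all(|e| matches!(e, Expr::Constant(_))) {
                    let values = elts
                        .into_iter()
                        .map(|e| match e {
                            Expr::Constant(c) => c,
                            _ => unreachable!(),
                        })
                        .collect();
                    Expr::Constant(Const::Tuple(values))
                } else {
                    Expr::Tuple(elts)
                }
            }
            other => other,
        }
    }

    fn main() {
        let ast = Expr::Tuple(vec![
            Expr::Constant(Const::Int(1)),
            Expr::Tuple(vec![Expr::Constant(Const::Int(2)), Expr::Constant(Const::Int(3))]),
        ]);
        assert_eq!(
            fold(ast),
            Expr::Constant(Const::Tuple(vec![
                Const::Int(1),
                Const::Tuple(vec![Const::Int(2), Const::Int(3)]),
            ]))
        );
    }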


@@ -64,11 +64,4 @@ macro_rules! simple_fold {
     };
 }

-simple_fold!(
-    usize,
-    String,
-    bool,
-    StrRef,
-    constant::Constant,
-    constant::ConversionFlag
-);
+simple_fold!(usize, String, bool, StrRef, constant::Constant, constant::ConversionFlag);
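The simple_fold! invocation (now a one-liner) expands to identity fold implementations for each listed leaf type. A minimal sketch of the same macro shape, written against a hypothetical SimpleFold trait rather than the crate's real Fold<U>:

    // Hypothetical stand-in for the crate's Fold machinery: "simple" leaf
    // types fold to themselves unchanged.
    trait SimpleFold {
        fn fold(self) -> Self;
    }

    macro_rules! simple_fold {
        ($($t:ty),+ $(,)?) => {
            $(
                impl SimpleFold for $t {
                    fn fold(self) -> Self {
                        self
                    }
                }
            )+
        };
    }

    simple_fold!(usize, String, bool);

    fn main() {
        assert_eq!(3usize.fold(), 3);
        assert_eq!(String::from("x").fold(), "x");
    }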


@@ -34,10 +34,7 @@ impl<U> ExprKind<U> {
             ExprKind::Starred { .. } => "starred",
             ExprKind::Slice { .. } => "slice",
             ExprKind::JoinedStr { values } => {
-                if values
-                    .iter()
-                    .any(|e| matches!(e.node, ExprKind::JoinedStr { .. }))
-                {
+                if values.iter().any(|e| matches!(e.node, ExprKind::JoinedStr { .. })) {
                     "f-string expression"
                 } else {
                     "literal"

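The JoinedStr condition in the hunk above only affects how the node is named in diagnostics: it is reported as an "f-string expression" when any element is itself a JoinedStr, and as a "literal" otherwise. The same rule restated over a toy node type, not the real ExprKind:

    #[derive(Debug)]
    enum Node {
        Literal(String),
        JoinedStr(Vec<Node>),
    }

    // Mirror of the naming rule: a joined string that directly contains
    // another joined string is described as an f-string expression.
    fn describe(node: &Node) -> &'static str {
        match node {
            Node::JoinedStr(values) => {
                if values.iter().any(|e| matches!(e, Node::JoinedStr(_))) {
                    "f-string expression"
                } else {
                    "literal"
                }
            }
            Node::Literal(_) => "literal",
        }
    }

    fn main() {
        let plain = Node::JoinedStr(vec![Node::Literal("abc".into())]);
        let nested = Node::JoinedStr(vec![Node::JoinedStr(vec![Node::Literal("x".into())])]);
        assert_eq!(describe(&plain), "literal");
        assert_eq!(describe(&nested), "f-string expression");
    }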

@@ -9,6 +9,6 @@ mod impls;
 mod location;
 pub use ast_gen::*;
-pub use location::{Location, FileName};
+pub use location::{FileName, Location};
 pub type Suite<U = ()> = Vec<Stmt<U>>;


@@ -21,7 +21,7 @@ impl From<String> for FileName {
 pub struct Location {
     pub row: usize,
     pub column: usize,
-    pub file: FileName
+    pub file: FileName,
 }

 impl fmt::Display for Location {
@ -53,11 +53,7 @@ impl Location {
) )
} }
} }
Visualize { Visualize { loc: *self, line, desc }
loc: *self,
line,
desc,
}
} }
} }


@@ -12,9 +12,10 @@ rayon = "1.5.1"
 nac3parser = { path = "../nac3parser" }

 [dependencies.inkwell]
-version = "0.1.0-beta.4"
+git = "https://github.com/nbaksalyar/inkwell.git"
+branch = "llvm14"
 default-features = false
-features = ["llvm13-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
+features = ["llvm14-0", "target-aarch64", "target-arm", "target-riscv", "no-libffi-linking"]

 [dev-dependencies]
 test-case = "1.2.0"


@@ -18,17 +18,8 @@ fn main() {
      * Compiling for WASM32 and filtering the output with regex is the closest we can get.
      */

-    const FLAG: &[&str] = &[
-        "--target=wasm32",
-        FILE,
-        "-O3",
-        "-emit-llvm",
-        "-S",
-        "-Wall",
-        "-Wextra",
-        "-o",
-        "-",
-    ];
+    const FLAG: &[&str] =
+        &["--target=wasm32", FILE, "-O3", "-emit-llvm", "-S", "-Wall", "-Wextra", "-o", "-"];
     let output = Command::new("clang")
         .args(FLAG)
         .output()


@ -10,20 +10,23 @@ use crate::{
}, },
symbol_resolver::{SymbolValue, ValueEnum}, symbol_resolver::{SymbolValue, ValueEnum},
toplevel::{DefinitionId, TopLevelDef}, toplevel::{DefinitionId, TopLevelDef},
typecheck::typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier}, typecheck::{
magic_methods::{binop_assign_name, binop_name},
typedef::{FunSignature, FuncArg, Type, TypeEnum, Unifier},
},
}; };
use inkwell::{ use inkwell::{
AddressSpace,
attributes::{Attribute, AttributeLoc}, attributes::{Attribute, AttributeLoc},
types::{AnyType, BasicType, BasicTypeEnum}, types::{AnyType, BasicType, BasicTypeEnum},
values::{BasicValueEnum, FunctionValue, IntValue, PointerValue} values::{BasicValueEnum, FunctionValue, IntValue, PointerValue},
AddressSpace,
}; };
use itertools::{chain, izip, zip, Itertools}; use itertools::{chain, izip, zip, Itertools};
use nac3parser::ast::{ use nac3parser::ast::{
self, Boolop, Comprehension, Constant, Expr, ExprKind, Location, Operator, StrRef, self, Boolop, Comprehension, Constant, Expr, ExprKind, Location, Operator, StrRef,
}; };
use super::{CodeGenerator, need_sret}; use super::{need_sret, CodeGenerator};
pub fn get_subst_key( pub fn get_subst_key(
unifier: &mut Unifier, unifier: &mut Unifier,
@ -238,7 +241,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
op: &Operator, op: &Operator,
lhs: BasicValueEnum<'ctx>, lhs: BasicValueEnum<'ctx>,
rhs: BasicValueEnum<'ctx>, rhs: BasicValueEnum<'ctx>,
signed: bool signed: bool,
) -> BasicValueEnum<'ctx> { ) -> BasicValueEnum<'ctx> {
let (lhs, rhs) = let (lhs, rhs) =
if let (BasicValueEnum::IntValue(lhs), BasicValueEnum::IntValue(rhs)) = (lhs, rhs) { if let (BasicValueEnum::IntValue(lhs), BasicValueEnum::IntValue(rhs)) = (lhs, rhs) {
@ -267,9 +270,15 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
(Operator::BitXor, _) => self.builder.build_xor(lhs, rhs, "xor").into(), (Operator::BitXor, _) => self.builder.build_xor(lhs, rhs, "xor").into(),
(Operator::BitAnd, _) => self.builder.build_and(lhs, rhs, "and").into(), (Operator::BitAnd, _) => self.builder.build_and(lhs, rhs, "and").into(),
(Operator::LShift, _) => self.builder.build_left_shift(lhs, rhs, "lshift").into(), (Operator::LShift, _) => self.builder.build_left_shift(lhs, rhs, "lshift").into(),
(Operator::RShift, _) => self.builder.build_right_shift(lhs, rhs, true, "rshift").into(), (Operator::RShift, _) => {
(Operator::FloorDiv, true) => self.builder.build_int_signed_div(lhs, rhs, "floordiv").into(), self.builder.build_right_shift(lhs, rhs, true, "rshift").into()
(Operator::FloorDiv, false) => self.builder.build_int_unsigned_div(lhs, rhs, "floordiv").into(), }
(Operator::FloorDiv, true) => {
self.builder.build_int_signed_div(lhs, rhs, "floordiv").into()
}
(Operator::FloorDiv, false) => {
self.builder.build_int_unsigned_div(lhs, rhs, "floordiv").into()
}
(Operator::Pow, s) => integer_power(generator, self, lhs, rhs, s).into(), (Operator::Pow, s) => integer_power(generator, self, lhs, rhs, s).into(),
// special implementation? // special implementation?
(Operator::MatMult, _) => unreachable!(), (Operator::MatMult, _) => unreachable!(),
@ -337,18 +346,28 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
let byval_id = Attribute::get_named_enum_kind_id("byval"); let byval_id = Attribute::get_named_enum_kind_id("byval");
let offset = if fun.get_enum_attribute(AttributeLoc::Param(0), sret_id).is_some() { let offset = if fun.get_enum_attribute(AttributeLoc::Param(0), sret_id).is_some() {
return_slot = Some(self.builder.build_alloca(fun.get_type().get_param_types()[0] return_slot = Some(
.into_pointer_type().get_element_type().into_struct_type(), call_name)); self.builder.build_alloca(
fun.get_type().get_param_types()[0]
.into_pointer_type()
.get_element_type()
.into_struct_type(),
call_name,
),
);
loc_params.push((*return_slot.as_ref().unwrap()).into()); loc_params.push((*return_slot.as_ref().unwrap()).into());
1 1
} else { } else {
0 0
}; };
for (i, param) in params.iter().enumerate() { for (i, param) in params.iter().enumerate() {
if fun.get_enum_attribute(AttributeLoc::Param((i + offset) as u32), byval_id).is_some() { if fun
.get_enum_attribute(AttributeLoc::Param((i + offset) as u32), byval_id)
.is_some()
{
// lazy update // lazy update
if loc_params.is_empty() { if loc_params.is_empty() {
loc_params.extend(params[0..i+offset].iter().copied()); loc_params.extend(params[0..i + offset].iter().copied());
} }
let slot = self.builder.build_alloca(param.get_type(), call_name); let slot = self.builder.build_alloca(param.get_type(), call_name);
loc_params.push(slot.into()); loc_params.push(slot.into());
@ -358,11 +377,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
} }
} }
} }
let params = if loc_params.is_empty() { let params = if loc_params.is_empty() { params } else { &loc_params };
params
} else {
&loc_params
};
let result = if let Some(target) = self.unwind_target { let result = if let Some(target) = self.unwind_target {
let current = self.builder.get_insert_block().unwrap().get_parent().unwrap(); let current = self.builder.get_insert_block().unwrap().get_parent().unwrap();
let then_block = self.ctx.append_basic_block(current, &format!("after.{}", call_name)); let then_block = self.ctx.append_basic_block(current, &format!("after.{}", call_name));
@ -493,12 +508,10 @@ pub fn gen_constructor<'ctx, 'a, G: CodeGenerator>(
match def { match def {
TopLevelDef::Class { methods, .. } => { TopLevelDef::Class { methods, .. } => {
// TODO: what about other fields that require alloca? // TODO: what about other fields that require alloca?
let mut fun_id = None; let fun_id = methods
for (name, _, id) in methods.iter() { .iter()
if name == &"__init__".into() { .find(|method| method.0 == "__init__".into())
fun_id = Some(*id); .and_then(|method| Some(method.2));
}
}
let ty = ctx.get_llvm_type(generator, signature.ret).into_pointer_type(); let ty = ctx.get_llvm_type(generator, signature.ret).into_pointer_type();
let zelf_ty: BasicTypeEnum = ty.get_element_type().try_into().unwrap(); let zelf_ty: BasicTypeEnum = ty.get_element_type().try_into().unwrap();
let zelf: BasicValueEnum<'ctx> = ctx.builder.build_alloca(zelf_ty, "alloca").into(); let zelf: BasicValueEnum<'ctx> = ctx.builder.build_alloca(zelf_ty, "alloca").into();
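gen_constructor now locates __init__ with Iterator::find instead of a hand-written loop over the class methods. The shape of that lookup, sketched over plain (name, signature-id, definition-id) tuples; the element types here are placeholders, not the real TopLevelDef fields:

    // Placeholder for the (name, signature, DefinitionId) triples stored on a class.
    type Method = (&'static str, u32, usize);

    // Return the definition id of __init__, if the class declares one.
    fn find_init(methods: &[Method]) -> Option<usize> {
        methods
            .iter()
            .find(|method| method.0 == "__init__")
            .map(|method| method.2)
    }

    fn main() {
        let methods: Vec<Method> = vec![("__init__", 0, 7), ("run", 1, 8)];
        assert_eq!(find_init(&methods), Some(7));
        assert_eq!(find_init(&[("run", 1, 8)]), None);
    }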
@ -638,8 +651,12 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator>(
); );
} }
// reorder the parameters // reorder the parameters
let mut real_params = let mut real_params = fun
fun.0.args.iter().map(|arg| (mapping.remove(&arg.name).unwrap(), arg.ty)).collect_vec(); .0
.args
.iter()
.map(|arg| (mapping.remove(&arg.name).unwrap(), arg.ty))
.collect_vec();
if let Some(obj) = &obj { if let Some(obj) = &obj {
real_params.insert(0, (obj.1.clone(), obj.0)); real_params.insert(0, (obj.1.clone(), obj.0));
} }
@ -702,32 +719,48 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator>(
}; };
let has_sret = ret_type.map_or(false, |ret_type| need_sret(ctx.ctx, ret_type)); let has_sret = ret_type.map_or(false, |ret_type| need_sret(ctx.ctx, ret_type));
let mut byvals = Vec::new(); let mut byvals = Vec::new();
let mut params = let mut params = args
args.iter().enumerate().map(|(i, arg)| match ctx.get_llvm_type(generator, arg.ty) { .iter()
.enumerate()
.map(|(i, arg)| {
match ctx.get_llvm_type(generator, arg.ty) {
BasicTypeEnum::StructType(ty) if is_extern => { BasicTypeEnum::StructType(ty) if is_extern => {
byvals.push((i, ty)); byvals.push((i, ty));
ty.ptr_type(AddressSpace::Generic).into() ty.ptr_type(AddressSpace::Generic).into()
}, }
x => x x => x,
}.into()).collect_vec(); }
.into()
})
.collect_vec();
if has_sret { if has_sret {
params.insert(0, ret_type.unwrap().ptr_type(AddressSpace::Generic).into()); params.insert(0, ret_type.unwrap().ptr_type(AddressSpace::Generic).into());
} }
let fun_ty = match ret_type { let fun_ty = match ret_type {
Some(ret_type) if !has_sret => ret_type.fn_type(&params, false), Some(ret_type) if !has_sret => ret_type.fn_type(&params, false),
_ => ctx.ctx.void_type().fn_type(&params, false) _ => ctx.ctx.void_type().fn_type(&params, false),
}; };
let fun_val = ctx.module.add_function(&symbol, fun_ty, None); let fun_val = ctx.module.add_function(&symbol, fun_ty, None);
let offset = if has_sret { let offset = if has_sret {
fun_val.add_attribute(AttributeLoc::Param(0), fun_val.add_attribute(
ctx.ctx.create_type_attribute(Attribute::get_named_enum_kind_id("sret"), ret_type.unwrap().as_any_type_enum())); AttributeLoc::Param(0),
ctx.ctx.create_type_attribute(
Attribute::get_named_enum_kind_id("sret"),
ret_type.unwrap().as_any_type_enum(),
),
);
1 1
} else { } else {
0 0
}; };
for (i, ty) in byvals { for (i, ty) in byvals {
fun_val.add_attribute(AttributeLoc::Param((i as u32) + offset), fun_val.add_attribute(
ctx.ctx.create_type_attribute(Attribute::get_named_enum_kind_id("byval"), ty.as_any_type_enum())); AttributeLoc::Param((i as u32) + offset),
ctx.ctx.create_type_attribute(
Attribute::get_named_enum_kind_id("byval"),
ty.as_any_type_enum(),
),
);
} }
fun_val fun_val
}); });
@ -791,7 +824,11 @@ pub fn gen_comprehension<'ctx, 'a, G: CodeGenerator>(
let cont_bb = ctx.ctx.append_basic_block(current, "cont"); let cont_bb = ctx.ctx.append_basic_block(current, "cont");
let Comprehension { target, iter, ifs, .. } = &generators[0]; let Comprehension { target, iter, ifs, .. } = &generators[0];
let iter_val = generator.gen_expr(ctx, iter)?.unwrap().to_basic_value_enum(ctx, generator, iter.custom.unwrap())?; let iter_val = generator.gen_expr(ctx, iter)?.unwrap().to_basic_value_enum(
ctx,
generator,
iter.custom.unwrap(),
)?;
let int32 = ctx.ctx.i32_type(); let int32 = ctx.ctx.i32_type();
let size_t = generator.get_size_type(ctx.ctx); let size_t = generator.get_size_type(ctx.ctx);
let zero_size_t = size_t.const_zero(); let zero_size_t = size_t.const_zero();
@ -927,21 +964,31 @@ pub fn gen_binop_expr<'ctx, 'a, G: CodeGenerator>(
left: &Expr<Option<Type>>, left: &Expr<Option<Type>>,
op: &Operator, op: &Operator,
right: &Expr<Option<Type>>, right: &Expr<Option<Type>>,
) -> Result<ValueEnum<'ctx>, String> { loc: Location,
is_aug_assign: bool,
) -> Result<Option<ValueEnum<'ctx>>, String> {
let ty1 = ctx.unifier.get_representative(left.custom.unwrap()); let ty1 = ctx.unifier.get_representative(left.custom.unwrap());
let ty2 = ctx.unifier.get_representative(right.custom.unwrap()); let ty2 = ctx.unifier.get_representative(right.custom.unwrap());
let left = generator.gen_expr(ctx, left)?.unwrap().to_basic_value_enum(ctx, generator, left.custom.unwrap())?; let left_val = generator.gen_expr(ctx, left)?.unwrap().to_basic_value_enum(
let right = generator.gen_expr(ctx, right)?.unwrap().to_basic_value_enum(ctx, generator, right.custom.unwrap())?; ctx,
generator,
left.custom.unwrap(),
)?;
let right_val = generator.gen_expr(ctx, right)?.unwrap().to_basic_value_enum(
ctx,
generator,
right.custom.unwrap(),
)?;
// we can directly compare the types, because we've got their representatives // we can directly compare the types, because we've got their representatives
// which would be unchanged until further unification, which we would never do // which would be unchanged until further unification, which we would never do
// when doing code generation for function instances // when doing code generation for function instances
Ok(if ty1 == ty2 && [ctx.primitives.int32, ctx.primitives.int64].contains(&ty1) { if ty1 == ty2 && [ctx.primitives.int32, ctx.primitives.int64].contains(&ty1) {
ctx.gen_int_ops(generator, op, left, right, true) Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, true).into()))
} else if ty1 == ty2 && [ctx.primitives.uint32, ctx.primitives.uint64].contains(&ty1) { } else if ty1 == ty2 && [ctx.primitives.uint32, ctx.primitives.uint64].contains(&ty1) {
ctx.gen_int_ops(generator, op, left, right, false) Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, false).into()))
} else if ty1 == ty2 && ctx.primitives.float == ty1 { } else if ty1 == ty2 && ctx.primitives.float == ty1 {
ctx.gen_float_ops(op, left, right) Ok(Some(ctx.gen_float_ops(op, left_val, right_val).into()))
} else if ty1 == ctx.primitives.float && ty2 == ctx.primitives.int32 { } else if ty1 == ctx.primitives.float && ty2 == ctx.primitives.int32 {
// Pow is the only operator that would pass typecheck between float and int // Pow is the only operator that would pass typecheck between float and int
assert!(*op == Operator::Pow); assert!(*op == Operator::Pow);
@ -951,14 +998,62 @@ pub fn gen_binop_expr<'ctx, 'a, G: CodeGenerator>(
let ty = f64_t.fn_type(&[f64_t.into(), i32_t.into()], false); let ty = f64_t.fn_type(&[f64_t.into(), i32_t.into()], false);
ctx.module.add_function("llvm.powi.f64.i32", ty, None) ctx.module.add_function("llvm.powi.f64.i32", ty, None)
}); });
ctx.builder let res = ctx
.build_call(pow_intr, &[left.into(), right.into()], "f_pow_i") .builder
.build_call(pow_intr, &[left_val.into(), right_val.into()], "f_pow_i")
.try_as_basic_value() .try_as_basic_value()
.unwrap_left() .unwrap_left();
Ok(Some(res.into()))
} else { } else {
unimplemented!() let (op_name, id) = if let TypeEnum::TObj { fields, obj_id, .. } =
ctx.unifier.get_ty_immutable(left.custom.unwrap()).as_ref()
{
let (binop_name, binop_assign_name) =
(binop_name(op).into(), binop_assign_name(op).into());
// if is aug_assign, try aug_assign operator first
if is_aug_assign && fields.contains_key(&binop_assign_name) {
(binop_assign_name, *obj_id)
} else {
(binop_name, *obj_id)
}
} else {
unreachable!("must be tobj")
};
let signature = match ctx.calls.get(&loc.into()) {
Some(call) => ctx.unifier.get_call_signature(*call).unwrap(),
None => {
if let TypeEnum::TObj { fields, .. } =
ctx.unifier.get_ty_immutable(left.custom.unwrap()).as_ref()
{
let fn_ty = fields.get(&op_name).unwrap().0;
if let TypeEnum::TFunc(sig) = ctx.unifier.get_ty_immutable(fn_ty).as_ref() {
sig.clone()
} else {
unreachable!("must be func sig")
}
} else {
unreachable!("must be tobj")
}
}
};
let fun_id = {
let defs = ctx.top_level.definitions.read();
let obj_def = defs.get(id.0).unwrap().read();
if let TopLevelDef::Class { methods, .. } = &*obj_def {
methods.iter().find(|method| method.0 == op_name).unwrap().2
} else {
unreachable!()
}
};
generator
.gen_call(
ctx,
Some((left.custom.unwrap(), left_val.into())),
(&signature, fun_id),
vec![(None, right_val.into())],
)
.map(|f| f.map(|f| f.into()))
} }
.into())
} }
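For non-primitive operands, gen_binop_expr now falls back to the class's own operator method: the Operator is mapped to a magic-method name via binop_name / binop_assign_name, and augmented assignment prefers the in-place variant when the class defines one. A rough sketch of just that name-selection step, with a local two-variant Operator enum and a plain string list standing in for the typechecker's field map (the __add__/__iadd__ names follow the usual Python convention and are assumed here, not quoted from magic_methods.rs):

    // Local stand-ins; the real code uses nac3parser::ast::Operator and the
    // fields map on TypeEnum::TObj.
    #[derive(Clone, Copy)]
    enum Operator {
        Add,
        Sub,
    }

    fn binop_name(op: Operator) -> &'static str {
        match op {
            Operator::Add => "__add__",
            Operator::Sub => "__sub__",
        }
    }

    fn binop_assign_name(op: Operator) -> &'static str {
        match op {
            Operator::Add => "__iadd__",
            Operator::Sub => "__isub__",
        }
    }

    // For `a += b`, prefer __iadd__ when the class defines it; otherwise fall
    // back to the plain binary operator method.
    fn select_method(op: Operator, is_aug_assign: bool, methods: &[&str]) -> &'static str {
        let assign = binop_assign_name(op);
        if is_aug_assign && methods.contains(&assign) {
            assign
        } else {
            binop_name(op)
        }
    }

    fn main() {
        assert_eq!(select_method(Operator::Add, true, &["__add__", "__iadd__"]), "__iadd__");
        assert_eq!(select_method(Operator::Add, true, &["__add__"]), "__add__");
        assert_eq!(select_method(Operator::Sub, false, &["__sub__", "__isub__"]), "__sub__");
    }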
pub fn gen_expr<'ctx, 'a, G: CodeGenerator>( pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
@ -979,14 +1074,14 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
ctx.unifier.get_ty(expr.custom.unwrap()).as_ref(), ctx.unifier.get_ty(expr.custom.unwrap()).as_ref(),
ctx.unifier.get_ty(ctx.primitives.option).as_ref(), ctx.unifier.get_ty(ctx.primitives.option).as_ref(),
) { ) {
( (TypeEnum::TObj { obj_id, params, .. }, TypeEnum::TObj { obj_id: opt_id, .. })
TypeEnum::TObj { obj_id, params, .. }, if *obj_id == *opt_id =>
TypeEnum::TObj { obj_id: opt_id, .. }, {
) if *obj_id == *opt_id => ctx ctx.get_llvm_type(generator, *params.iter().next().unwrap().1)
.get_llvm_type(generator, *params.iter().next().unwrap().1)
.ptr_type(AddressSpace::Generic) .ptr_type(AddressSpace::Generic)
.const_null() .const_null()
.into(), .into()
}
_ => unreachable!("must be option type"), _ => unreachable!("must be option type"),
} }
} }
@ -1004,12 +1099,9 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
let elements = elts let elements = elts
.iter() .iter()
.map(|x| { .map(|x| {
generator generator.gen_expr(ctx, x).map_or_else(Err, |v| {
.gen_expr(ctx, x) v.unwrap().to_basic_value_enum(ctx, generator, x.custom.unwrap())
.map_or_else( })
Err,
|v| v.unwrap().to_basic_value_enum(ctx, generator, x.custom.unwrap())
)
}) })
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let ty = if elements.is_empty() { let ty = if elements.is_empty() {
@ -1040,9 +1132,9 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
let element_val = elts let element_val = elts
.iter() .iter()
.map(|x| { .map(|x| {
generator generator.gen_expr(ctx, x).map_or_else(Err, |v| {
.gen_expr(ctx, x) v.unwrap().to_basic_value_enum(ctx, generator, x.custom.unwrap())
.map_or_else(Err, |v| v.unwrap().to_basic_value_enum(ctx, generator, x.custom.unwrap())) })
}) })
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let element_ty = element_val.iter().map(BasicValueEnum::get_type).collect_vec(); let element_ty = element_val.iter().map(BasicValueEnum::get_type).collect_vec();
@ -1063,14 +1155,17 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
ExprKind::Attribute { value, attr, .. } => { ExprKind::Attribute { value, attr, .. } => {
// note that we would handle class methods directly in calls // note that we would handle class methods directly in calls
match generator.gen_expr(ctx, value)?.unwrap() { match generator.gen_expr(ctx, value)?.unwrap() {
ValueEnum::Static(v) => v.get_field(*attr, ctx).map_or_else(|| { ValueEnum::Static(v) => v.get_field(*attr, ctx).map_or_else(
|| {
let v = v.to_basic_value_enum(ctx, generator, value.custom.unwrap())?; let v = v.to_basic_value_enum(ctx, generator, value.custom.unwrap())?;
let index = ctx.get_attr_index(value.custom.unwrap(), *attr); let index = ctx.get_attr_index(value.custom.unwrap(), *attr);
Ok(ValueEnum::Dynamic(ctx.build_gep_and_load( Ok(ValueEnum::Dynamic(ctx.build_gep_and_load(
v.into_pointer_value(), v.into_pointer_value(),
&[zero, int32.const_int(index as u64, false)], &[zero, int32.const_int(index as u64, false)],
))) as Result<_, String> ))) as Result<_, String>
}, Ok)?, },
Ok,
)?,
ValueEnum::Dynamic(v) => { ValueEnum::Dynamic(v) => {
let index = ctx.get_attr_index(value.custom.unwrap(), *attr); let index = ctx.get_attr_index(value.custom.unwrap(), *attr);
ValueEnum::Dynamic(ctx.build_gep_and_load( ValueEnum::Dynamic(ctx.build_gep_and_load(
@ -1125,13 +1220,16 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
phi.add_incoming(&[(&a, a_bb), (&b, b_bb)]); phi.add_incoming(&[(&a, a_bb), (&b, b_bb)]);
phi.as_basic_value().into() phi.as_basic_value().into()
} }
ExprKind::BinOp { op, left, right } => gen_binop_expr(generator, ctx, left, op, right)?, ExprKind::BinOp { op, left, right } => {
return gen_binop_expr(generator, ctx, left, op, right, expr.location, false);
}
ExprKind::UnaryOp { op, operand } => { ExprKind::UnaryOp { op, operand } => {
let ty = ctx.unifier.get_representative(operand.custom.unwrap()); let ty = ctx.unifier.get_representative(operand.custom.unwrap());
let val = let val = generator.gen_expr(ctx, operand)?.unwrap().to_basic_value_enum(
generator.gen_expr(ctx, operand)? ctx,
.unwrap() generator,
.to_basic_value_enum(ctx, generator, operand.custom.unwrap())?; operand.custom.unwrap(),
)?;
if ty == ctx.primitives.bool { if ty == ctx.primitives.bool {
let val = val.into_int_value(); let val = val.into_int_value();
match op { match op {
@ -1188,14 +1286,16 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
BasicValueEnum::IntValue(lhs), BasicValueEnum::IntValue(lhs),
BasicValueEnum::IntValue(rhs), BasicValueEnum::IntValue(rhs),
) = ( ) = (
generator generator.gen_expr(ctx, lhs)?.unwrap().to_basic_value_enum(
.gen_expr(ctx, lhs)? ctx,
.unwrap() generator,
.to_basic_value_enum(ctx, generator, lhs.custom.unwrap())?, lhs.custom.unwrap(),
generator )?,
.gen_expr(ctx, rhs)? generator.gen_expr(ctx, rhs)?.unwrap().to_basic_value_enum(
.unwrap() ctx,
.to_basic_value_enum(ctx, generator, rhs.custom.unwrap())?, generator,
rhs.custom.unwrap(),
)?,
) { ) {
(lhs, rhs) (lhs, rhs)
} else { } else {
@ -1216,14 +1316,16 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
BasicValueEnum::FloatValue(lhs), BasicValueEnum::FloatValue(lhs),
BasicValueEnum::FloatValue(rhs), BasicValueEnum::FloatValue(rhs),
) = ( ) = (
generator generator.gen_expr(ctx, lhs)?.unwrap().to_basic_value_enum(
.gen_expr(ctx, lhs)? ctx,
.unwrap() generator,
.to_basic_value_enum(ctx, generator, lhs.custom.unwrap())?, lhs.custom.unwrap(),
generator )?,
.gen_expr(ctx, rhs)? generator.gen_expr(ctx, rhs)?.unwrap().to_basic_value_enum(
.unwrap() ctx,
.to_basic_value_enum(ctx, generator, rhs.custom.unwrap())?, generator,
rhs.custom.unwrap(),
)?,
) { ) {
(lhs, rhs) (lhs, rhs)
} else { } else {
@ -1281,7 +1383,8 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
match result { match result {
None => None, None => None,
Some(v) => { Some(v) => {
let b = b.unwrap().to_basic_value_enum(ctx, generator, orelse.custom.unwrap())?; let b =
b.unwrap().to_basic_value_enum(ctx, generator, orelse.custom.unwrap())?;
Some(ctx.builder.build_store(v, b)) Some(ctx.builder.build_store(v, b))
} }
}; };
@ -1289,7 +1392,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
ctx.builder.position_at_end(cont_bb); ctx.builder.position_at_end(cont_bb);
match result { match result {
None => return Ok(None), None => return Ok(None),
Some(v) => return Ok(Some(ctx.builder.build_load(v, "if_exp_val_load").into())) Some(v) => return Ok(Some(ctx.builder.build_load(v, "if_exp_val_load").into())),
} }
} }
ExprKind::Call { func, args, keywords } => { ExprKind::Call { func, args, keywords } => {
@ -1342,13 +1445,7 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
let defs = ctx.top_level.definitions.read(); let defs = ctx.top_level.definitions.read();
let obj_def = defs.get(id.0).unwrap().read(); let obj_def = defs.get(id.0).unwrap().read();
if let TopLevelDef::Class { methods, .. } = &*obj_def { if let TopLevelDef::Class { methods, .. } = &*obj_def {
let mut fun_id = None; methods.iter().find(|method| method.0 == *attr).unwrap().2
for (name, _, id) in methods.iter() {
if name == attr {
fun_id = Some(*id);
}
}
fun_id.unwrap()
} else { } else {
unreachable!() unreachable!()
} }
@ -1369,14 +1466,12 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
.unwrap() .unwrap()
.get_parent() .get_parent()
.unwrap(); .unwrap();
let unreachable_block = ctx.ctx.append_basic_block( let unreachable_block = ctx
current_fun, .ctx
"unwrap_none_unreachable" .append_basic_block(current_fun, "unwrap_none_unreachable");
); let exn_block = ctx
let exn_block = ctx.ctx.append_basic_block( .ctx
current_fun, .append_basic_block(current_fun, "unwrap_none_exception");
"unwrap_none_exception"
);
ctx.builder.build_unconditional_branch(exn_block); ctx.builder.build_unconditional_branch(exn_block);
ctx.builder.position_at_end(exn_block); ctx.builder.position_at_end(exn_block);
ctx.raise_exn( ctx.raise_exn(
@ -1384,22 +1479,24 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
"0:UnwrapNoneError", "0:UnwrapNoneError",
err_msg, err_msg,
[None, None, None], [None, None, None],
ctx.current_loc ctx.current_loc,
); );
ctx.builder.position_at_end(unreachable_block); ctx.builder.position_at_end(unreachable_block);
let ptr = ctx let ptr = ctx
.get_llvm_type(generator, value.custom.unwrap()) .get_llvm_type(generator, value.custom.unwrap())
.into_pointer_type() .into_pointer_type()
.const_null(); .const_null();
return Ok(Some(ctx.builder.build_load( return Ok(Some(
ptr, ctx.builder
"unwrap_none_unreachable_load" .build_load(ptr, "unwrap_none_unreachable_load")
).into())); .into(),
));
} }
Some(v) => return Ok(Some(v)), Some(v) => return Ok(Some(v)),
} },
ValueEnum::Dynamic(BasicValueEnum::PointerValue(ptr)) => { ValueEnum::Dynamic(BasicValueEnum::PointerValue(ptr)) => {
let not_null = ctx.builder.build_is_not_null(ptr, "unwrap_not_null"); let not_null =
ctx.builder.build_is_not_null(ptr, "unwrap_not_null");
ctx.make_assert( ctx.make_assert(
generator, generator,
not_null, not_null,
@ -1408,12 +1505,11 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
[None, None, None], [None, None, None],
expr.location, expr.location,
); );
return Ok(Some(ctx.builder.build_load( return Ok(Some(
ptr, ctx.builder.build_load(ptr, "unwrap_some_load").into(),
"unwrap_some_load" ));
).into()))
} }
_ => unreachable!("option must be static or ptr") _ => unreachable!("option must be static or ptr"),
} }
} }
return Ok(generator return Ok(generator
@ -1524,16 +1620,13 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
} else { } else {
unreachable!("tuple subscript must be const int after type check"); unreachable!("tuple subscript must be const int after type check");
}; };
let v = generator let v = generator.gen_expr(ctx, value)?.unwrap();
.gen_expr(ctx, value)?
.unwrap();
match v { match v {
ValueEnum::Dynamic(v) => { ValueEnum::Dynamic(v) => {
let v = v.into_struct_value(); let v = v.into_struct_value();
ctx.builder.build_extract_value(v, index, "tup_elem").unwrap().into() ctx.builder.build_extract_value(v, index, "tup_elem").unwrap().into()
} }
ValueEnum::Static(v) => { ValueEnum::Static(v) => match v.get_tuple_element(index) {
match v.get_tuple_element(index) {
Some(v) => v, Some(v) => v,
None => { None => {
let tup = v let tup = v
@ -1541,13 +1634,12 @@ pub fn gen_expr<'ctx, 'a, G: CodeGenerator>(
.into_struct_value(); .into_struct_value();
ctx.builder.build_extract_value(tup, index, "tup_elem").unwrap().into() ctx.builder.build_extract_value(tup, index, "tup_elem").unwrap().into()
} }
} },
}
} }
} else { } else {
unreachable!("should not be other subscriptable types after type check"); unreachable!("should not be other subscriptable types after type check");
} }
}, }
ExprKind::ListComp { .. } => gen_comprehension(generator, ctx, expr)?.into(), ExprKind::ListComp { .. } => gen_comprehension(generator, ctx, expr)?.into(),
_ => unimplemented!(), _ => unimplemented!(),
})) }))


@@ -1,9 +1,4 @@
-typedef _ExtInt(8) int8_t;
-typedef unsigned _ExtInt(8) uint8_t;
-typedef _ExtInt(32) int32_t;
-typedef unsigned _ExtInt(32) uint32_t;
-typedef _ExtInt(64) int64_t;
-typedef unsigned _ExtInt(64) uint64_t;
+#include "stdint.h"

 # define MAX(a, b) (a > b ? a : b)
 # define MIN(a, b) (a > b ? b : a)


@ -261,7 +261,11 @@ pub fn handle_slice_index_bound<'a, 'ctx, G: CodeGenerator>(
ctx.module.add_function(SYMBOL, fn_t, None) ctx.module.add_function(SYMBOL, fn_t, None)
}); });
let i = generator.gen_expr(ctx, i)?.unwrap().to_basic_value_enum(ctx, generator, i.custom.unwrap())?; let i = generator.gen_expr(ctx, i)?.unwrap().to_basic_value_enum(
ctx,
generator,
i.custom.unwrap(),
)?;
Ok(ctx Ok(ctx
.builder .builder
.build_call(func, &[i.into(), length.into()], "bounded_ind") .build_call(func, &[i.into(), length.into()], "bounded_ind")
@ -329,27 +333,19 @@ pub fn list_slice_assignment<'ctx, 'a>(
// index in bound and positive should be done // index in bound and positive should be done
// assert if dest.step == 1 then len(src) <= len(dest) else len(src) == len(dest), and // assert if dest.step == 1 then len(src) <= len(dest) else len(src) == len(dest), and
// throw exception if not satisfied // throw exception if not satisfied
let src_end = ctx.builder let src_end = ctx
.builder
.build_select( .build_select(
ctx.builder.build_int_compare( ctx.builder.build_int_compare(inkwell::IntPredicate::SLT, src_idx.2, zero, "is_neg"),
inkwell::IntPredicate::SLT,
src_idx.2,
zero,
"is_neg",
),
ctx.builder.build_int_sub(src_idx.1, one, "e_min_one"), ctx.builder.build_int_sub(src_idx.1, one, "e_min_one"),
ctx.builder.build_int_add(src_idx.1, one, "e_add_one"), ctx.builder.build_int_add(src_idx.1, one, "e_add_one"),
"final_e", "final_e",
) )
.into_int_value(); .into_int_value();
let dest_end = ctx.builder let dest_end = ctx
.builder
.build_select( .build_select(
ctx.builder.build_int_compare( ctx.builder.build_int_compare(inkwell::IntPredicate::SLT, dest_idx.2, zero, "is_neg"),
inkwell::IntPredicate::SLT,
dest_idx.2,
zero,
"is_neg",
),
ctx.builder.build_int_sub(dest_idx.1, one, "e_min_one"), ctx.builder.build_int_sub(dest_idx.1, one, "e_min_one"),
ctx.builder.build_int_add(dest_idx.1, one, "e_add_one"), ctx.builder.build_int_add(dest_idx.1, one, "e_add_one"),
"final_e", "final_e",

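The comment in the list_slice_assignment hunk above states the length rule being asserted: when the destination step is 1 the source may be no longer than the destination, otherwise the element counts must match exactly. A plain restatement of that predicate, detached from the IR-building code (parameter names are mine, and "destination length" is read as in the source comment):

    // Length rule behind the assertion for `dest[a:b:step] = src`.
    fn slice_assign_lengths_ok(dest_step: i64, src_len: usize, dest_len: usize) -> bool {
        if dest_step == 1 {
            // unit step: the source only has to fit
            src_len <= dest_len
        } else {
            // any other step: one source element per selected slot
            src_len == dest_len
        }
    }

    fn main() {
        assert!(slice_assign_lengths_ok(1, 2, 5));
        assert!(!slice_assign_lengths_ok(2, 2, 3));
        assert!(slice_assign_lengths_ok(-1, 3, 3));
    }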

@ -8,8 +8,6 @@ use crate::{
}; };
use crossbeam::channel::{unbounded, Receiver, Sender}; use crossbeam::channel::{unbounded, Receiver, Sender};
use inkwell::{ use inkwell::{
AddressSpace,
OptimizationLevel,
attributes::{Attribute, AttributeLoc}, attributes::{Attribute, AttributeLoc},
basic_block::BasicBlock, basic_block::BasicBlock,
builder::Builder, builder::Builder,
@ -17,10 +15,11 @@ use inkwell::{
module::Module, module::Module,
passes::{PassManager, PassManagerBuilder}, passes::{PassManager, PassManagerBuilder},
types::{AnyType, BasicType, BasicTypeEnum}, types::{AnyType, BasicType, BasicTypeEnum},
values::{BasicValueEnum, FunctionValue, PhiValue, PointerValue} values::{BasicValueEnum, FunctionValue, PhiValue, PointerValue},
AddressSpace, OptimizationLevel,
}; };
use itertools::Itertools; use itertools::Itertools;
use nac3parser::ast::{Stmt, StrRef, Location}; use nac3parser::ast::{Location, Stmt, StrRef};
use parking_lot::{Condvar, Mutex}; use parking_lot::{Condvar, Mutex};
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::sync::{ use std::sync::{
@ -300,7 +299,10 @@ fn get_llvm_type<'ctx>(
&*definition.read() &*definition.read()
{ {
let struct_type = ctx.opaque_struct_type(&name.to_string()); let struct_type = ctx.opaque_struct_type(&name.to_string());
type_cache.insert(unifier.get_representative(ty), struct_type.ptr_type(AddressSpace::Generic).into()); type_cache.insert(
unifier.get_representative(ty),
struct_type.ptr_type(AddressSpace::Generic).into(),
);
let fields = fields_list let fields = fields_list
.iter() .iter()
.map(|f| { .map(|f| {
@ -326,7 +328,11 @@ fn get_llvm_type<'ctx>(
// a struct with fields in the order present in the tuple // a struct with fields in the order present in the tuple
let fields = ty let fields = ty
.iter() .iter()
.map(|ty| get_llvm_type(ctx, generator, unifier, top_level, type_cache, primitives, *ty)) .map(|ty| {
get_llvm_type(
ctx, generator, unifier, top_level, type_cache, primitives, *ty,
)
})
.collect_vec(); .collect_vec();
ctx.struct_type(&fields, false).into() ctx.struct_type(&fields, false).into()
} }
@ -349,26 +355,35 @@ fn get_llvm_type<'ctx>(
} }
fn need_sret<'ctx>(ctx: &'ctx Context, ty: BasicTypeEnum<'ctx>) -> bool { fn need_sret<'ctx>(ctx: &'ctx Context, ty: BasicTypeEnum<'ctx>) -> bool {
fn need_sret_impl<'ctx>(ctx: &'ctx Context, ty: BasicTypeEnum<'ctx>, maybe_large: bool) -> bool { fn need_sret_impl<'ctx>(
ctx: &'ctx Context,
ty: BasicTypeEnum<'ctx>,
maybe_large: bool,
) -> bool {
match ty { match ty {
BasicTypeEnum::IntType(_) | BasicTypeEnum::PointerType(_) => false, BasicTypeEnum::IntType(_) | BasicTypeEnum::PointerType(_) => false,
BasicTypeEnum::FloatType(_) if maybe_large => false, BasicTypeEnum::FloatType(_) if maybe_large => false,
BasicTypeEnum::StructType(ty) if maybe_large && ty.count_fields() <= 2 => BasicTypeEnum::StructType(ty) if maybe_large && ty.count_fields() <= 2 => {
ty.get_field_types().iter().any(|ty| need_sret_impl(ctx, *ty, false)), ty.get_field_types().iter().any(|ty| need_sret_impl(ctx, *ty, false))
}
_ => true, _ => true,
} }
} }
need_sret_impl(ctx, ty, true) need_sret_impl(ctx, ty, true)
} }
pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenContext) -> Result<(), String>> ( pub fn gen_func_impl<
'ctx,
G: CodeGenerator,
F: FnOnce(&mut G, &mut CodeGenContext) -> Result<(), String>,
>(
context: &'ctx Context, context: &'ctx Context,
generator: &mut G, generator: &mut G,
registry: &WorkerRegistry, registry: &WorkerRegistry,
builder: Builder<'ctx>, builder: Builder<'ctx>,
module: Module<'ctx>, module: Module<'ctx>,
task: CodeGenTask, task: CodeGenTask,
codegen_function: F codegen_function: F,
) -> Result<(Builder<'ctx>, Module<'ctx>, FunctionValue<'ctx>), (Builder<'ctx>, String)> { ) -> Result<(Builder<'ctx>, Module<'ctx>, FunctionValue<'ctx>), (Builder<'ctx>, String)> {
let top_level_ctx = registry.top_level_ctx.clone(); let top_level_ctx = registry.top_level_ctx.clone();
let static_value_store = registry.static_value_store.clone(); let static_value_store = registry.static_value_store.clone();
@ -463,7 +478,15 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
let ret_type = if unifier.unioned(ret, primitives.none) { let ret_type = if unifier.unioned(ret, primitives.none) {
None None
} else { } else {
Some(get_llvm_type(context, generator, &mut unifier, top_level_ctx.as_ref(), &mut type_cache, &primitives, ret)) Some(get_llvm_type(
context,
generator,
&mut unifier,
top_level_ctx.as_ref(),
&mut type_cache,
&primitives,
ret,
))
}; };
let has_sret = ret_type.map_or(false, |ty| need_sret(context, ty)); let has_sret = ret_type.map_or(false, |ty| need_sret(context, ty));
@ -489,7 +512,7 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
let fn_type = match ret_type { let fn_type = match ret_type {
Some(ret_type) if !has_sret => ret_type.fn_type(&params, false), Some(ret_type) if !has_sret => ret_type.fn_type(&params, false),
_ => context.void_type().fn_type(&params, false) _ => context.void_type().fn_type(&params, false),
}; };
let symbol = &task.symbol_name; let symbol = &task.symbol_name;
@ -504,9 +527,13 @@ pub fn gen_func_impl<'ctx, G: CodeGenerator, F: FnOnce(&mut G, &mut CodeGenConte
fn_val.set_personality_function(personality); fn_val.set_personality_function(personality);
} }
if has_sret { if has_sret {
fn_val.add_attribute(AttributeLoc::Param(0), fn_val.add_attribute(
context.create_type_attribute(Attribute::get_named_enum_kind_id("sret"), AttributeLoc::Param(0),
ret_type.unwrap().as_any_type_enum())); context.create_type_attribute(
Attribute::get_named_enum_kind_id("sret"),
ret_type.unwrap().as_any_type_enum(),
),
);
} }
let init_bb = context.append_basic_block(fn_val, "init"); let init_bb = context.append_basic_block(fn_val, "init");


@ -87,19 +87,15 @@ pub fn gen_store_target<'ctx, 'a, G: CodeGenerator>(
.unwrap() .unwrap()
.to_basic_value_enum(ctx, generator, value.custom.unwrap())? .to_basic_value_enum(ctx, generator, value.custom.unwrap())?
.into_pointer_value(); .into_pointer_value();
let len = ctx let len =
.build_gep_and_load(v, &[zero, i32_type.const_int(1, false)]) ctx.build_gep_and_load(v, &[zero, i32_type.const_int(1, false)]).into_int_value();
.into_int_value();
let raw_index = generator let raw_index = generator
.gen_expr(ctx, slice)? .gen_expr(ctx, slice)?
.unwrap() .unwrap()
.to_basic_value_enum(ctx, generator, slice.custom.unwrap())? .to_basic_value_enum(ctx, generator, slice.custom.unwrap())?
.into_int_value(); .into_int_value();
let raw_index = ctx.builder.build_int_s_extend( let raw_index =
raw_index, ctx.builder.build_int_s_extend(raw_index, generator.get_size_type(ctx.ctx), "sext");
generator.get_size_type(ctx.ctx),
"sext",
);
// handle negative index // handle negative index
let is_negative = ctx.builder.build_int_compare( let is_negative = ctx.builder.build_int_compare(
inkwell::IntPredicate::SLT, inkwell::IntPredicate::SLT,
@ -114,12 +110,8 @@ pub fn gen_store_target<'ctx, 'a, G: CodeGenerator>(
.into_int_value(); .into_int_value();
// unsigned less than is enough, because negative index after adjustment is // unsigned less than is enough, because negative index after adjustment is
// bigger than the length (for unsigned cmp) // bigger than the length (for unsigned cmp)
let bound_check = ctx.builder.build_int_compare( let bound_check =
inkwell::IntPredicate::ULT, ctx.builder.build_int_compare(inkwell::IntPredicate::ULT, index, len, "inbound");
index,
len,
"inbound",
);
ctx.make_assert( ctx.make_assert(
generator, generator,
bound_check, bound_check,
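The bound check above leans on a standard trick: after a negative index has had the length added to it, anything still out of range compares as >= len once the comparison is done unsigned, so a single ULT test covers both ends. Roughly, in plain Rust (assuming the list length is non-negative and fits in an i32):

    // One unsigned comparison checks both "still negative after adjustment" and
    // "past the end": reinterpreting a negative i32 as u32 yields a huge value.
    fn checked_list_index(raw_index: i32, len: i32) -> Option<u32> {
        let adjusted = if raw_index < 0 { raw_index + len } else { raw_index };
        if (adjusted as u32) < (len as u32) {
            Some(adjusted as u32)
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(checked_list_index(2, 5), Some(2));
        assert_eq!(checked_list_index(-1, 5), Some(4));
        assert_eq!(checked_list_index(-7, 5), None); // -7 + 5 = -2 wraps to a huge u32
        assert_eq!(checked_list_index(5, 5), None);
    }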
@@ -494,7 +486,7 @@ pub fn get_builtins<'ctx, 'a>(
     if symbol == "__nac3_raise" || symbol == "__nac3_resume" {
         fun.add_attribute(
             AttributeLoc::Function,
-            ctx.ctx.create_enum_attribute(Attribute::get_named_enum_kind_id("noreturn"), 1),
+            ctx.ctx.create_enum_attribute(Attribute::get_named_enum_kind_id("noreturn"), 0),
         );
     }
     fun
@ -696,12 +688,15 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
&mut ctx.unifier, &mut ctx.unifier,
type_.custom.unwrap(), type_.custom.unwrap(),
); );
let obj_id = if let TypeEnum::TObj { obj_id, .. } = &*ctx.unifier.get_ty(type_.custom.unwrap()) { let obj_id = if let TypeEnum::TObj { obj_id, .. } =
&*ctx.unifier.get_ty(type_.custom.unwrap())
{
*obj_id *obj_id
} else { } else {
unreachable!() unreachable!()
}; };
let exception_name = format!("{}:{}", ctx.resolver.get_exception_id(obj_id.0), exn_name); let exception_name =
format!("{}:{}", ctx.resolver.get_exception_id(obj_id.0), exn_name);
let exn_id = ctx.resolver.get_string_id(&exception_name); let exn_id = ctx.resolver.get_string_id(&exception_name);
let exn_id_global = let exn_id_global =
ctx.module.add_global(ctx.ctx.i32_type(), None, &format!("exn.{}", exn_id)); ctx.module.add_global(ctx.ctx.i32_type(), None, &format!("exn.{}", exn_id));
@ -751,7 +746,9 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
let mut final_proxy_lambda = let mut final_proxy_lambda =
|ctx: &mut CodeGenContext<'ctx, 'a>, |ctx: &mut CodeGenContext<'ctx, 'a>,
target: BasicBlock<'ctx>, target: BasicBlock<'ctx>,
block: BasicBlock<'ctx>| final_proxy(ctx, target, block, final_data.as_mut().unwrap()); block: BasicBlock<'ctx>| {
final_proxy(ctx, target, block, final_data.as_mut().unwrap())
};
let mut redirect_lambda = |ctx: &mut CodeGenContext<'ctx, 'a>, let mut redirect_lambda = |ctx: &mut CodeGenContext<'ctx, 'a>,
target: BasicBlock<'ctx>, target: BasicBlock<'ctx>,
block: BasicBlock<'ctx>| { block: BasicBlock<'ctx>| {
@ -1020,8 +1017,8 @@ pub fn gen_stmt<'ctx, 'a, G: CodeGenerator>(
StmtKind::For { .. } => generator.gen_for(ctx, stmt)?, StmtKind::For { .. } => generator.gen_for(ctx, stmt)?,
StmtKind::With { .. } => generator.gen_with(ctx, stmt)?, StmtKind::With { .. } => generator.gen_with(ctx, stmt)?,
StmtKind::AugAssign { target, op, value, .. } => { StmtKind::AugAssign { target, op, value, .. } => {
let value = gen_binop_expr(generator, ctx, target, op, value)?; let value = gen_binop_expr(generator, ctx, target, op, value, stmt.location, true)?;
generator.gen_assign(ctx, target, value)?; generator.gen_assign(ctx, target, value.unwrap())?;
} }
StmtKind::Try { .. } => gen_try(generator, ctx, stmt)?, StmtKind::Try { .. } => gen_try(generator, ctx, stmt)?,
StmtKind::Raise { exc, .. } => { StmtKind::Raise { exc, .. } => {
@ -1059,7 +1056,7 @@ pub fn gen_stmt<'ctx, 'a, G: CodeGenerator>(
stmt.location, stmt.location,
) )
} }
_ => unimplemented!() _ => unimplemented!(),
}; };
Ok(()) Ok(())
} }


@@ -160,7 +160,7 @@ pub trait SymbolResolver {
         &self,
         _unifier: &mut Unifier,
         _top_level_defs: &[Arc<RwLock<TopLevelDef>>],
-        _primitives: &PrimitiveStore
+        _primitives: &PrimitiveStore,
     ) -> Result<(), String> {
         Ok(())
     }


@@ -14,8 +14,8 @@ pub fn get_exn_constructor(
     class_id: usize,
     cons_id: usize,
     unifier: &mut Unifier,
-    primitives: &PrimitiveStore
-)-> (TopLevelDef, TopLevelDef, Type, Type) {
+    primitives: &PrimitiveStore,
+) -> (TopLevelDef, TopLevelDef, Type, Type) {
     let int32 = primitives.int32;
     let int64 = primitives.int64;
     let string = primitives.str;
@ -225,11 +225,8 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
codegen_callback: Some(Arc::new(GenCall::new(Box::new( codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|ctx, obj, _, _, generator| { |ctx, obj, _, _, generator| {
let expect_ty = obj.clone().unwrap().0; let expect_ty = obj.clone().unwrap().0;
let obj_val = obj.unwrap().1.clone().to_basic_value_enum( let obj_val =
ctx, obj.unwrap().1.clone().to_basic_value_enum(ctx, generator, expect_ty)?;
generator,
expect_ty,
)?;
if let BasicValueEnum::PointerValue(ptr) = obj_val { if let BasicValueEnum::PointerValue(ptr) = obj_val {
Ok(Some(ctx.builder.build_is_not_null(ptr, "is_some").into())) Ok(Some(ctx.builder.build_is_not_null(ptr, "is_some").into()))
} else { } else {
@ -250,11 +247,8 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
codegen_callback: Some(Arc::new(GenCall::new(Box::new( codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|ctx, obj, _, _, generator| { |ctx, obj, _, _, generator| {
let expect_ty = obj.clone().unwrap().0; let expect_ty = obj.clone().unwrap().0;
let obj_val = obj.unwrap().1.clone().to_basic_value_enum( let obj_val =
ctx, obj.unwrap().1.clone().to_basic_value_enum(ctx, generator, expect_ty)?;
generator,
expect_ty,
)?;
if let BasicValueEnum::PointerValue(ptr) = obj_val { if let BasicValueEnum::PointerValue(ptr) = obj_val {
Ok(Some(ctx.builder.build_is_null(ptr, "is_none").into())) Ok(Some(ctx.builder.build_is_null(ptr, "is_none").into()))
} else { } else {
@ -272,11 +266,9 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
instance_to_symbol: Default::default(), instance_to_symbol: Default::default(),
instance_to_stmt: Default::default(), instance_to_stmt: Default::default(),
resolver: None, resolver: None,
codegen_callback: Some(Arc::new(GenCall::new(Box::new( codegen_callback: Some(Arc::new(GenCall::new(Box::new(|_, _, _, _, _| {
|_, _, _, _, _| {
unreachable!("handled in gen_expr") unreachable!("handled in gen_expr")
}, })))),
)))),
loc: None, loc: None,
})), })),
Arc::new(RwLock::new(TopLevelDef::Function { Arc::new(RwLock::new(TopLevelDef::Function {
@ -567,7 +559,11 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
resolver: None, resolver: None,
codegen_callback: Some(Arc::new(GenCall::new(Box::new( codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|ctx, _, _, args, generator| { |ctx, _, _, args, generator| {
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?; let arg = args[0].1.clone().to_basic_value_enum(
ctx,
generator,
ctx.primitives.float,
)?;
let round_intrinsic = let round_intrinsic =
ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| { ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| {
let float = ctx.ctx.f64_type(); let float = ctx.ctx.f64_type();
@ -607,7 +603,11 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
resolver: None, resolver: None,
codegen_callback: Some(Arc::new(GenCall::new(Box::new( codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|ctx, _, _, args, generator| { |ctx, _, _, args, generator| {
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?; let arg = args[0].1.clone().to_basic_value_enum(
ctx,
generator,
ctx.primitives.float,
)?;
let round_intrinsic = let round_intrinsic =
ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| { ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| {
let float = ctx.ctx.f64_type(); let float = ctx.ctx.f64_type();
@ -668,13 +668,15 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
let ty_i32 = ctx.primitives.int32; let ty_i32 = ctx.primitives.int32;
for (i, arg) in args.iter().enumerate() { for (i, arg) in args.iter().enumerate() {
if arg.0 == Some("start".into()) { if arg.0 == Some("start".into()) {
start = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?); start =
Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
} else if arg.0 == Some("stop".into()) { } else if arg.0 == Some("stop".into()) {
stop = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?); stop = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
} else if arg.0 == Some("step".into()) { } else if arg.0 == Some("step".into()) {
step = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?); step = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
} else if i == 0 { } else if i == 0 {
start = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?); start =
Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
} else if i == 1 { } else if i == 1 {
stop = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?); stop = Some(arg.1.clone().to_basic_value_enum(ctx, generator, ty_i32)?);
} else if i == 2 { } else if i == 2 {
@ -829,7 +831,11 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
resolver: None, resolver: None,
codegen_callback: Some(Arc::new(GenCall::new(Box::new( codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|ctx, _, _, args, generator| { |ctx, _, _, args, generator| {
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?; let arg = args[0].1.clone().to_basic_value_enum(
ctx,
generator,
ctx.primitives.float,
)?;
let floor_intrinsic = let floor_intrinsic =
ctx.module.get_function("llvm.floor.f64").unwrap_or_else(|| { ctx.module.get_function("llvm.floor.f64").unwrap_or_else(|| {
let float = ctx.ctx.f64_type(); let float = ctx.ctx.f64_type();
@ -869,7 +875,11 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
resolver: None, resolver: None,
codegen_callback: Some(Arc::new(GenCall::new(Box::new( codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|ctx, _, _, args, generator| { |ctx, _, _, args, generator| {
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?; let arg = args[0].1.clone().to_basic_value_enum(
ctx,
generator,
ctx.primitives.float,
)?;
let floor_intrinsic = let floor_intrinsic =
ctx.module.get_function("llvm.floor.f64").unwrap_or_else(|| { ctx.module.get_function("llvm.floor.f64").unwrap_or_else(|| {
let float = ctx.ctx.f64_type(); let float = ctx.ctx.f64_type();
@ -909,7 +919,11 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
resolver: None, resolver: None,
codegen_callback: Some(Arc::new(GenCall::new(Box::new( codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|ctx, _, _, args, generator| { |ctx, _, _, args, generator| {
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?; let arg = args[0].1.clone().to_basic_value_enum(
ctx,
generator,
ctx.primitives.float,
)?;
let ceil_intrinsic = let ceil_intrinsic =
ctx.module.get_function("llvm.ceil.f64").unwrap_or_else(|| { ctx.module.get_function("llvm.ceil.f64").unwrap_or_else(|| {
let float = ctx.ctx.f64_type(); let float = ctx.ctx.f64_type();
@ -949,7 +963,11 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
resolver: None, resolver: None,
codegen_callback: Some(Arc::new(GenCall::new(Box::new( codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|ctx, _, _, args, generator| { |ctx, _, _, args, generator| {
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, ctx.primitives.float)?; let arg = args[0].1.clone().to_basic_value_enum(
ctx,
generator,
ctx.primitives.float,
)?;
let ceil_intrinsic = let ceil_intrinsic =
ctx.module.get_function("llvm.ceil.f64").unwrap_or_else(|| { ctx.module.get_function("llvm.ceil.f64").unwrap_or_else(|| {
let float = ctx.ctx.f64_type(); let float = ctx.ctx.f64_type();
@ -1005,7 +1023,10 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
Ok(if ctx.unifier.unioned(arg_ty, range_ty) { Ok(if ctx.unifier.unioned(arg_ty, range_ty) {
let arg = arg.into_pointer_value(); let arg = arg.into_pointer_value();
let (start, end, step) = destructure_range(ctx, arg); let (start, end, step) = destructure_range(ctx, arg);
Some(calculate_len_for_slice_range(generator, ctx, start, end, step).into()) Some(
calculate_len_for_slice_range(generator, ctx, start, end, step)
.into(),
)
} else { } else {
let int32 = ctx.ctx.i32_type(); let int32 = ctx.ctx.i32_type();
let zero = int32.const_zero(); let zero = int32.const_zero();
@ -1244,30 +1265,12 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
})), })),
]; ];
let ast_list: Vec<Option<ast::Stmt<()>>> = let ast_list: Vec<Option<ast::Stmt<()>>> = vec![None; top_level_def_list.len()];
(0..top_level_def_list.len()).map(|_| None).collect();
( (
izip!(top_level_def_list, ast_list).collect_vec(), izip!(top_level_def_list, ast_list).collect_vec(),
&[ &[
"int32", "int32", "int64", "uint32", "uint64", "float", "round", "round64", "range", "str",
"int64", "bool", "floor", "floor64", "ceil", "ceil64", "len", "min", "max", "abs", "Some",
"uint32",
"uint64",
"float",
"round",
"round64",
"range",
"str",
"bool",
"floor",
"floor64",
"ceil",
"ceil64",
"len",
"min",
"max",
"abs",
"Some",
], ],
) )
} }


@ -91,8 +91,7 @@ impl TopLevelComposer {
assert!(name == *simple_name); assert!(name == *simple_name);
builtin_ty.insert(name, *signature); builtin_ty.insert(name, *signature);
builtin_id.insert(name, DefinitionId(id)); builtin_id.insert(name, DefinitionId(id));
} else if let TopLevelDef::Class { name, constructor, object_id, .. } = &*def } else if let TopLevelDef::Class { name, constructor, object_id, .. } = &*def {
{
assert!(id == object_id.0); assert!(id == object_id.0);
if let Some(constructor) = constructor { if let Some(constructor) = constructor {
builtin_ty.insert(*name, *constructor); builtin_ty.insert(*name, *constructor);
@ -739,8 +738,13 @@ impl TopLevelComposer {
let mut subst_list = Some(Vec::new()); let mut subst_list = Some(Vec::new());
// unification of previously assigned typevar // unification of previously assigned typevar
let mut unification_helper = |ty, def| { let mut unification_helper = |ty, def| {
let target_ty = let target_ty = get_type_from_type_annotation_kinds(
get_type_from_type_annotation_kinds(&temp_def_list, unifier, primitives, &def, &mut subst_list)?; &temp_def_list,
unifier,
primitives,
&def,
&mut subst_list,
)?;
unifier.unify(ty, target_ty).map_err(|e| e.to_display(unifier).to_string())?; unifier.unify(ty, target_ty).map_err(|e| e.to_display(unifier).to_string())?;
Ok(()) as Result<(), String> Ok(()) as Result<(), String>
}; };
@ -780,7 +784,9 @@ impl TopLevelComposer {
match &*def.read() { match &*def.read() {
TopLevelDef::Class { resolver: Some(resolver), .. } TopLevelDef::Class { resolver: Some(resolver), .. }
| TopLevelDef::Function { resolver: Some(resolver), .. } => { | TopLevelDef::Function { resolver: Some(resolver), .. } => {
if let Err(e) = resolver.handle_deferred_eval(unifier, &temp_def_list, primitives) { if let Err(e) =
resolver.handle_deferred_eval(unifier, &temp_def_list, primitives)
{
errors.insert(e); errors.insert(e);
} }
} }
@ -904,7 +910,7 @@ impl TopLevelComposer {
unifier, unifier,
primitives_store, primitives_store,
&type_annotation, &type_annotation,
&mut None &mut None,
)?; )?;
Ok(FuncArg { Ok(FuncArg {
@ -972,7 +978,7 @@ impl TopLevelComposer {
unifier, unifier,
primitives_store, primitives_store,
&return_ty_annotation, &return_ty_annotation,
&mut None &mut None,
)? )?
} else { } else {
primitives_store.none primitives_store.none
@ -1334,7 +1340,7 @@ impl TopLevelComposer {
)); ));
} }
} }
ast::StmtKind::Assign { .. } => {}, // we don't class attributes ast::StmtKind::Assign { .. } => {} // we don't class attributes
ast::StmtKind::Pass { .. } => {} ast::StmtKind::Pass { .. } => {}
ast::StmtKind::Expr { value: _, .. } => {} // typically a docstring; ignoring all expressions matches CPython behavior ast::StmtKind::Expr { value: _, .. } => {} // typically a docstring; ignoring all expressions matches CPython behavior
_ => { _ => {
@ -1483,24 +1489,30 @@ impl TopLevelComposer {
// first, fix function typevar ids // first, fix function typevar ids
// they may be changed with our use of placeholders // they may be changed with our use of placeholders
for (def, _) in definition_ast_list.iter().skip(self.builtin_num) { for (def, _) in definition_ast_list.iter().skip(self.builtin_num) {
if let TopLevelDef::Function { if let TopLevelDef::Function { signature, var_id, .. } = &mut *def.write() {
signature,
var_id,
..
} = &mut *def.write() {
if let TypeEnum::TFunc(FunSignature { args, ret, vars }) = if let TypeEnum::TFunc(FunSignature { args, ret, vars }) =
unifier.get_ty(*signature).as_ref() { unifier.get_ty(*signature).as_ref()
let new_var_ids = vars.values().map(|v| match &*unifier.get_ty(*v) { {
TypeEnum::TVar{id, ..} => *id, let new_var_ids = vars
.values()
.map(|v| match &*unifier.get_ty(*v) {
TypeEnum::TVar { id, .. } => *id,
_ => unreachable!(), _ => unreachable!(),
}).collect_vec(); })
.collect_vec();
if new_var_ids != *var_id { if new_var_ids != *var_id {
let new_signature = FunSignature { let new_signature = FunSignature {
args: args.clone(), args: args.clone(),
ret: *ret, ret: *ret,
vars: new_var_ids.iter().zip(vars.values()).map(|(id, v)| (*id, *v)).collect(), vars: new_var_ids
.iter()
.zip(vars.values())
.map(|(id, v)| (*id, *v))
.collect(),
}; };
unifier.unification_table.set_value(*signature, Rc::new(TypeEnum::TFunc(new_signature))); unifier
.unification_table
.set_value(*signature, Rc::new(TypeEnum::TFunc(new_signature)));
*var_id = new_var_ids; *var_id = new_var_ids;
} }
} }
@ -1527,7 +1539,7 @@ impl TopLevelComposer {
unifier, unifier,
primitives_ty, primitives_ty,
&make_self_type_annotation(type_vars, *object_id), &make_self_type_annotation(type_vars, *object_id),
&mut None &mut None,
)?; )?;
if ancestors if ancestors
.iter() .iter()
@ -1696,13 +1708,14 @@ impl TopLevelComposer {
unifier, unifier,
primitives_ty, primitives_ty,
&ty_ann, &ty_ann,
&mut None &mut None,
)?; )?;
vars.extend(type_vars.iter().map(|ty| vars.extend(type_vars.iter().map(|ty| {
if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*ty) { if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*ty) {
(*id, *ty) (*id, *ty)
} else { } else {
unreachable!() unreachable!()
}
})); }));
Some((self_ty, type_vars.clone())) Some((self_ty, type_vars.clone()))
} else { } else {
@ -1918,7 +1931,12 @@ impl TopLevelComposer {
} }
instance_to_stmt.insert( instance_to_stmt.insert(
get_subst_key(unifier, self_type, &subst, Some(&vars.keys().cloned().collect())), get_subst_key(
unifier,
self_type,
&subst,
Some(&vars.keys().cloned().collect()),
),
FunInstance { FunInstance {
body: Arc::new(fun_body), body: Arc::new(fun_body),
unifier_id: 0, unifier_id: 0,


@ -521,65 +521,66 @@ pub fn parse_parameter_default_value(
} }
match &default.node { match &default.node {
ast::ExprKind::Constant { value, .. } => handle_constant(value, &default.location), ast::ExprKind::Constant { value, .. } => handle_constant(value, &default.location),
ast::ExprKind::Call { func, args, .. } if args.len() == 1 => { ast::ExprKind::Call { func, args, .. } if args.len() == 1 => match &func.node {
match &func.node {
ast::ExprKind::Name { id, .. } if *id == "int64".into() => match &args[0].node { ast::ExprKind::Name { id, .. } if *id == "int64".into() => match &args[0].node {
ast::ExprKind::Constant { value: Constant::Int(v), .. } => { ast::ExprKind::Constant { value: Constant::Int(v), .. } => {
let v: Result<i64, _> = (*v).try_into(); let v: Result<i64, _> = (*v).try_into();
match v { match v {
Ok(v) => Ok(SymbolValue::I64(v)), Ok(v) => Ok(SymbolValue::I64(v)),
_ => Err(format!("default param value out of range at {}", default.location)), _ => {
Err(format!("default param value out of range at {}", default.location))
} }
} }
_ => Err(format!("only allow constant integer here at {}", default.location))
} }
_ => Err(format!("only allow constant integer here at {}", default.location)),
},
ast::ExprKind::Name { id, .. } if *id == "uint32".into() => match &args[0].node { ast::ExprKind::Name { id, .. } if *id == "uint32".into() => match &args[0].node {
ast::ExprKind::Constant { value: Constant::Int(v), .. } => { ast::ExprKind::Constant { value: Constant::Int(v), .. } => {
let v: Result<u32, _> = (*v).try_into(); let v: Result<u32, _> = (*v).try_into();
match v { match v {
Ok(v) => Ok(SymbolValue::U32(v)), Ok(v) => Ok(SymbolValue::U32(v)),
_ => Err(format!("default param value out of range at {}", default.location)), _ => {
Err(format!("default param value out of range at {}", default.location))
} }
} }
_ => Err(format!("only allow constant integer here at {}", default.location))
} }
_ => Err(format!("only allow constant integer here at {}", default.location)),
},
ast::ExprKind::Name { id, .. } if *id == "uint64".into() => match &args[0].node { ast::ExprKind::Name { id, .. } if *id == "uint64".into() => match &args[0].node {
ast::ExprKind::Constant { value: Constant::Int(v), .. } => { ast::ExprKind::Constant { value: Constant::Int(v), .. } => {
let v: Result<u64, _> = (*v).try_into(); let v: Result<u64, _> = (*v).try_into();
match v { match v {
Ok(v) => Ok(SymbolValue::U64(v)), Ok(v) => Ok(SymbolValue::U64(v)),
_ => Err(format!("default param value out of range at {}", default.location)), _ => {
Err(format!("default param value out of range at {}", default.location))
} }
} }
_ => Err(format!("only allow constant integer here at {}", default.location))
} }
ast::ExprKind::Name { id, .. } if *id == "Some".into() => Ok( _ => Err(format!("only allow constant integer here at {}", default.location)),
SymbolValue::OptionSome( },
Box::new(parse_parameter_default_value(&args[0], resolver)?) ast::ExprKind::Name { id, .. } if *id == "Some".into() => Ok(SymbolValue::OptionSome(
) Box::new(parse_parameter_default_value(&args[0], resolver)?),
), )),
_ => Err(format!("unsupported default parameter at {}", default.location)), _ => Err(format!("unsupported default parameter at {}", default.location)),
} },
} ast::ExprKind::Tuple { elts, .. } => Ok(SymbolValue::Tuple(
ast::ExprKind::Tuple { elts, .. } => Ok(SymbolValue::Tuple(elts elts.iter()
.iter()
.map(|x| parse_parameter_default_value(x, resolver)) .map(|x| parse_parameter_default_value(x, resolver))
.collect::<Result<Vec<_>, _>>()? .collect::<Result<Vec<_>, _>>()?,
)), )),
ast::ExprKind::Name { id, .. } if id == &"none".into() => Ok(SymbolValue::OptionNone), ast::ExprKind::Name { id, .. } if id == &"none".into() => Ok(SymbolValue::OptionNone),
ast::ExprKind::Name { id, .. } => { ast::ExprKind::Name { id, .. } => {
resolver.get_default_param_value(default).ok_or_else( resolver.get_default_param_value(default).ok_or_else(|| {
|| format!( format!(
"`{}` cannot be used as a default parameter at {} \ "`{}` cannot be used as a default parameter at {} \
(not primitive type, option or tuple / not defined?)", (not primitive type, option or tuple / not defined?)",
id, id, default.location
default.location
)
) )
})
} }
_ => Err(format!( _ => Err(format!(
"unsupported default parameter (not primitive type, option or tuple) at {}", "unsupported default parameter (not primitive type, option or tuple) at {}",
default.location default.location
)) )),
} }
} }
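
For reference, the matcher above accepts only a narrow set of default-parameter forms: plain constants, the explicitly sized integer constructors (int64(...), uint32(...), uint64(...)) applied to in-range integer literals, Some(...) wrapping another accepted default, tuples of accepted defaults, the literal none, and names the resolver can evaluate. A minimal sketch in the NAC3 kernel dialect, assuming the usual type names (int32, int64, uint32, Option); this is illustrative only and not part of the change set:

def f(a: int32 = 1,
      b: int64 = int64(2),
      c: uint32 = uint32(3),
      d: Option[int32] = Some(4),
      e: tuple[int32, float] = (5, 6.0),
      g: Option[int32] = none):
    # Any other expression (e.g. an arbitrary call or an unresolvable name)
    # is reported as an unsupported default parameter.
    ...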


@ -34,23 +34,23 @@ impl TypeAnnotation {
} }
None => format!("class_def_{}", id.0), None => format!("class_def_{}", id.0),
}; };
format!( format!("{}{}", class_name, {
"{}{}", let param_list =
class_name, params.iter().map(|p| p.stringify(unifier)).collect_vec().join(", ");
{
let param_list = params.iter().map(|p| p.stringify(unifier)).collect_vec().join(", ");
if param_list.is_empty() { if param_list.is_empty() {
"".into() "".into()
} else { } else {
format!("[{}]", param_list) format!("[{}]", param_list)
} }
} })
)
} }
Virtual(ty) => format!("virtual[{}]", ty.stringify(unifier)), Virtual(ty) => format!("virtual[{}]", ty.stringify(unifier)),
List(ty) => format!("list[{}]", ty.stringify(unifier)), List(ty) => format!("list[{}]", ty.stringify(unifier)),
Tuple(types) => { Tuple(types) => {
format!("tuple[{}]", types.iter().map(|p| p.stringify(unifier)).collect_vec().join(", ")) format!(
"tuple[{}]",
types.iter().map(|p| p.stringify(unifier)).collect_vec().join(", ")
)
} }
} }
} }
@ -302,7 +302,7 @@ pub fn get_type_from_type_annotation_kinds(
unifier: &mut Unifier, unifier: &mut Unifier,
primitives: &PrimitiveStore, primitives: &PrimitiveStore,
ann: &TypeAnnotation, ann: &TypeAnnotation,
subst_list: &mut Option<Vec<Type>> subst_list: &mut Option<Vec<Type>>,
) -> Result<Type, String> { ) -> Result<Type, String> {
match ann { match ann {
TypeAnnotation::CustomClass { id: obj_id, params } => { TypeAnnotation::CustomClass { id: obj_id, params } => {
@ -324,7 +324,7 @@ pub fn get_type_from_type_annotation_kinds(
unifier, unifier,
primitives, primitives,
x, x,
subst_list subst_list,
) )
}) })
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
@ -402,7 +402,7 @@ pub fn get_type_from_type_annotation_kinds(
unifier, unifier,
primitives, primitives,
ty.as_ref(), ty.as_ref(),
subst_list subst_list,
)?; )?;
Ok(unifier.add_ty(TypeEnum::TVirtual { ty })) Ok(unifier.add_ty(TypeEnum::TVirtual { ty }))
} }
@ -412,7 +412,7 @@ pub fn get_type_from_type_annotation_kinds(
unifier, unifier,
primitives, primitives,
ty.as_ref(), ty.as_ref(),
subst_list subst_list,
)?; )?;
Ok(unifier.add_ty(TypeEnum::TList { ty })) Ok(unifier.add_ty(TypeEnum::TList { ty }))
} }
@ -420,7 +420,13 @@ pub fn get_type_from_type_annotation_kinds(
let tys = tys let tys = tys
.iter() .iter()
.map(|x| { .map(|x| {
get_type_from_type_annotation_kinds(top_level_defs, unifier, primitives, x, subst_list) get_type_from_type_annotation_kinds(
top_level_defs,
unifier,
primitives,
x,
subst_list,
)
}) })
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
Ok(unifier.add_ty(TypeEnum::TTuple { ty: tys })) Ok(unifier.add_ty(TypeEnum::TTuple { ty: tys }))


@ -20,8 +20,9 @@ impl<'a> Inferencer<'a> {
defined_identifiers: &mut HashSet<StrRef>, defined_identifiers: &mut HashSet<StrRef>,
) -> Result<(), String> { ) -> Result<(), String> {
match &pattern.node { match &pattern.node {
ast::ExprKind::Name { id, .. } if id == &"none".into() => ast::ExprKind::Name { id, .. } if id == &"none".into() => {
Err(format!("cannot assign to a `none` (at {})", pattern.location)), Err(format!("cannot assign to a `none` (at {})", pattern.location))
}
ExprKind::Name { id, .. } => { ExprKind::Name { id, .. } => {
if !defined_identifiers.contains(id) { if !defined_identifiers.contains(id) {
defined_identifiers.insert(*id); defined_identifiers.insert(*id);


@ -83,7 +83,7 @@ where
pub fn impl_binop( pub fn impl_binop(
unifier: &mut Unifier, unifier: &mut Unifier,
store: &PrimitiveStore, _store: &PrimitiveStore,
ty: Type, ty: Type,
other_ty: &[Type], other_ty: &[Type],
ret_ty: Type, ret_ty: Type,
@ -120,7 +120,7 @@ pub fn impl_binop(
fields.insert(binop_assign_name(op).into(), { fields.insert(binop_assign_name(op).into(), {
( (
unifier.add_ty(TypeEnum::TFunc(FunSignature { unifier.add_ty(TypeEnum::TFunc(FunSignature {
ret: store.none, ret: ret_ty,
vars: function_vars.clone(), vars: function_vars.clone(),
args: vec![FuncArg { args: vec![FuncArg {
ty: other_ty, ty: other_ty,


@ -423,14 +423,14 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
(None, None) => {} (None, None) => {}
}, },
ast::StmtKind::AugAssign { target, op, value, .. } => { ast::StmtKind::AugAssign { target, op, value, .. } => {
let res_ty = self.infer_bin_ops(stmt.location, target, op, value)?; let res_ty = self.infer_bin_ops(stmt.location, target, op, value, true)?;
self.unify(res_ty, target.custom.unwrap(), &stmt.location)?; self.unify(res_ty, target.custom.unwrap(), &stmt.location)?;
} }
ast::StmtKind::Assert { test, msg, .. } => { ast::StmtKind::Assert { test, msg, .. } => {
self.unify(test.custom.unwrap(), self.primitives.bool, &test.location)?; self.unify(test.custom.unwrap(), self.primitives.bool, &test.location)?;
match msg { match msg {
Some(m) => self.unify(m.custom.unwrap(), self.primitives.str, &m.location)?, Some(m) => self.unify(m.custom.unwrap(), self.primitives.str, &m.location)?,
None => () None => (),
} }
} }
_ => return report_error("Unsupported statement type", stmt.location), _ => return report_error("Unsupported statement type", stmt.location),
@ -464,9 +464,14 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
let var_map = params let var_map = params
.iter() .iter()
.map(|(id_var, ty)| { .map(|(id_var, ty)| {
if let TypeEnum::TVar { id, range, name, loc, .. } = &*self.unifier.get_ty(*ty) { if let TypeEnum::TVar { id, range, name, loc, .. } =
&*self.unifier.get_ty(*ty)
{
assert_eq!(*id, *id_var); assert_eq!(*id, *id_var);
(*id, self.unifier.get_fresh_var_with_range(range, *name, *loc).0) (
*id,
self.unifier.get_fresh_var_with_range(range, *name, *loc).0,
)
} else { } else {
unreachable!() unreachable!()
} }
@ -505,7 +510,7 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
} }
ast::ExprKind::BoolOp { values, .. } => Some(self.infer_bool_ops(values)?), ast::ExprKind::BoolOp { values, .. } => Some(self.infer_bool_ops(values)?),
ast::ExprKind::BinOp { left, op, right } => { ast::ExprKind::BinOp { left, op, right } => {
Some(self.infer_bin_ops(expr.location, left, op, right)?) Some(self.infer_bin_ops(expr.location, left, op, right, false)?)
} }
ast::ExprKind::UnaryOp { op, operand } => Some(self.infer_unary_ops(op, operand)?), ast::ExprKind::UnaryOp { op, operand } => Some(self.infer_unary_ops(op, operand)?),
ast::ExprKind::Compare { left, ops, comparators } => { ast::ExprKind::Compare { left, ops, comparators } => {
@ -822,7 +827,7 @@ impl<'a> Inferencer<'a> {
}, },
}); });
} else { } else {
return report_error("Integer out of bound", args[0].location) return report_error("Integer out of bound", args[0].location);
} }
} }
} }
@ -842,7 +847,7 @@ impl<'a> Inferencer<'a> {
}, },
}); });
} else { } else {
return report_error("Integer out of bound", args[0].location) return report_error("Integer out of bound", args[0].location);
} }
} }
} }
@ -862,10 +867,62 @@ impl<'a> Inferencer<'a> {
}, },
}); });
} else { } else {
return report_error("Integer out of bound", args[0].location) return report_error("Integer out of bound", args[0].location);
} }
} }
} }
// array() is a "magic" function call that determines the number of
// dimensions in the result from the nesting of the array argument type,
// to match the host Python NumPy API.
if id == "array".into() {
if args.is_empty() {
return report_error(
"`array()` expects at least one argument (contents in list form)",
func_location,
);
}
if args.len() > 2 || !keywords.is_empty() {
return report_error(
"Additional `array()` arguments not yet implemented",
func_location,
);
// TODO: Implement `dtype=` kwarg.
}
let list_arg = self.fold_expr(args.remove(0))?;
// TODO: Implement special case for empty arrays (e.g. `array([[]])`)
// to match NumPy.
let mut num_dims = 0;
let mut elem_type = list_arg.custom.unwrap();
while let TypeEnum::TList { ty } = &*self.unifier.get_ty(elem_type) {
elem_type = *ty;
num_dims += 1;
}
if num_dims == 0 {
return report_error(
"expected list argument to array(), not xxx",
func_location,
);
}
let custom =
Some(self.unifier.add_ty(TypeEnum::TNDArray { ty: elem_type, num_dims }));
return Ok(Located {
location,
custom,
node: ExprKind::Call {
func: Box::new(Located {
custom: None,
location: func.location,
node: ExprKind::Name { id, ctx },
}),
args: vec![list_arg],
keywords: vec![],
},
});
}
Located { location: func_location, custom, node: ExprKind::Name { id, ctx } } Located { location: func_location, custom, node: ExprKind::Name { id, ctx } }
} else { } else {
func func
@ -962,8 +1019,9 @@ impl<'a> Inferencer<'a> {
Ok(self.unifier.add_ty(TypeEnum::TTuple { ty: ty? })) Ok(self.unifier.add_ty(TypeEnum::TTuple { ty: ty? }))
} }
ast::Constant::Str(_) => Ok(self.primitives.str), ast::Constant::Str(_) => Ok(self.primitives.str),
ast::Constant::None ast::Constant::None => {
=> report_error("CPython `None` not supported (nac3 uses `none` instead)", *loc), report_error("CPython `None` not supported (nac3 uses `none` instead)", *loc)
}
_ => report_error("not supported", *loc), _ => report_error("not supported", *loc),
} }
} }
@ -998,8 +1056,11 @@ impl<'a> Inferencer<'a> {
} }
(None, _) => { (None, _) => {
let t = self.unifier.stringify(ty); let t = self.unifier.stringify(ty);
report_error(&format!("`{}::{}` field/method does not exist", t, attr), value.location) report_error(
}, &format!("`{}::{}` field/method does not exist", t, attr),
value.location,
)
}
} }
} else { } else {
let attr_ty = self.unifier.get_dummy_var().0; let attr_ty = self.unifier.get_dummy_var().0;
@ -1028,8 +1089,22 @@ impl<'a> Inferencer<'a> {
left: &ast::Expr<Option<Type>>, left: &ast::Expr<Option<Type>>,
op: &ast::Operator, op: &ast::Operator,
right: &ast::Expr<Option<Type>>, right: &ast::Expr<Option<Type>>,
is_aug_assign: bool,
) -> InferenceResult { ) -> InferenceResult {
let method = binop_name(op).into(); let method = if let TypeEnum::TObj { fields, .. } =
self.unifier.get_ty_immutable(left.custom.unwrap()).as_ref()
{
let (binop_name, binop_assign_name) =
(binop_name(op).into(), binop_assign_name(op).into());
// if is aug_assign, try aug_assign operator first
if is_aug_assign && fields.contains_key(&binop_assign_name) {
binop_assign_name
} else {
binop_name
}
} else {
binop_name(op).into()
};
self.build_method_call( self.build_method_call(
location, location,
method, method,
@ -1081,6 +1156,7 @@ impl<'a> Inferencer<'a> {
for v in [lower.as_ref(), upper.as_ref(), step.as_ref()].iter().flatten() { for v in [lower.as_ref(), upper.as_ref(), step.as_ref()].iter().flatten() {
self.constrain(v.custom.unwrap(), self.primitives.int32, &v.location)?; self.constrain(v.custom.unwrap(), self.primitives.int32, &v.location)?;
} }
// xxx: Support TNDArray.
let list = self.unifier.add_ty(TypeEnum::TList { ty }); let list = self.unifier.add_ty(TypeEnum::TList { ty });
self.constrain(value.custom.unwrap(), list, &value.location)?; self.constrain(value.custom.unwrap(), list, &value.location)?;
Ok(list) Ok(list)
@ -1099,18 +1175,79 @@ impl<'a> Inferencer<'a> {
Ok(ty) Ok(ty)
} }
_ => { _ => {
if let TypeEnum::TTuple { .. } = &*self.unifier.get_ty(value.custom.unwrap()) match &*self.unifier.get_ty(value.custom.unwrap()) {
{ TypeEnum::TTuple { .. } => return report_error(
return report_error("Tuple index must be a constant (KernelInvariant is also not supported)", slice.location) "Tuple index must be a constant (KernelInvariant is also not supported)",
slice.location,
),
TypeEnum::TNDArray { ty: elem_ty, num_dims } => {
let num_idxs = match &*self.unifier.get_ty(slice.custom.unwrap()) {
TypeEnum::TTuple { ty: idx_tys } => {
for idx_ty in idx_tys.iter() {
// xxx: NumPy supports a tuple of tuples for "advanced indexing"
// of multidimensional arrays (sequence index -> subset). We
// don't support this, but could give a better error message.
self.constrain(
*idx_ty,
self.primitives.int32,
&slice.location,
)?;
} }
idx_tys.len()
}
TypeEnum::TList { .. } | TypeEnum::TNDArray { .. } => {
return report_error(
concat!(
"ndarray index is list/array, but NumPy advanced (subset) ",
"indexing is not supported yet"
),
slice.location,
);
}
_ => {
// xxx: Could lead to suboptimal error message, as higher-dimensional indexing is not mentioned?!
self.constrain(
slice.custom.unwrap(),
self.primitives.int32,
&slice.location,
)?;
1
}
};
if *num_dims < num_idxs {
report_error(
&format!(
"ndarray has dimension {}, but {} indices supplied",
num_dims, num_idxs
),
slice.location,
)
} else if *num_dims == num_idxs {
Ok(*elem_ty)
} else {
Ok(self.unifier.add_ty(TypeEnum::TNDArray {
ty: *elem_ty,
num_dims: *num_dims - num_idxs,
}))
}
}
_ => {
// the index is not a constant, so value can only be a list // the index is not a constant, so value can only be a list
self.constrain(slice.custom.unwrap(), self.primitives.int32, &slice.location)?; // xxx: Or an ndarray now, so remove the constraint?
self.constrain(
slice.custom.unwrap(),
self.primitives.int32,
&slice.location,
)?;
let list = self.unifier.add_ty(TypeEnum::TList { ty }); let list = self.unifier.add_ty(TypeEnum::TList { ty });
self.constrain(value.custom.unwrap(), list, &value.location)?; self.constrain(value.custom.unwrap(), list, &value.location)?;
Ok(ty) Ok(ty)
} }
} }
} }
}
}
fn infer_if_expr( fn infer_if_expr(
&mut self, &mut self,
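
In user-facing terms, the inference added above derives the number of ndarray dimensions from the nesting depth of the array(...) argument, peels one dimension per scalar index (a tuple of integer indices consumes one dimension per element), and rejects list or array indices as unsupported advanced indexing. A minimal sketch in the NAC3 kernel dialect, assuming array and int32 as used in the tests below; illustrative only:

a = array([1, 2, 3])         # ndarray[int32, 1]
b = array([[1, 2], [3, 4]])  # ndarray[int32, 2]

x = a[0]       # int32: one scalar index on a 1-D array
row = b[1]     # ndarray[int32, 1]: one dimension peeled off
y = b[0, 1]    # int32: the tuple index consumes both dimensions
# b[0, 0, 0]   # rejected: more indices than dimensions
# b[[0, 1]]    # rejected: advanced (subset) indexing is not supported yet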


@ -513,6 +513,20 @@ impl TestEnvironment {
[("a", "list[int32]"), ("b", "list[int32]")].iter().cloned().collect(), [("a", "list[int32]"), ("b", "list[int32]")].iter().cloned().collect(),
&[] &[]
; "listcomp test")] ; "listcomp test")]
#[test_case(
indoc! {"
a = array([1, 2])
a0 = a[0]
b = array([[1, 2], [3, 4]])
b0 = b[0]
b00 = b[0, 0]
c = 1
ac = a[c]
"},
[("a", "ndarray[int32, 1]"), ("a0", "int32"), ("b", "ndarray[int32, 2]"),
("b0", "ndarray[int32, 1]"), ("b00", "int32"), ("ac", "int32")].iter().cloned().collect(),
&[]
; "array test")]
#[test_case(indoc! {" #[test_case(indoc! {"
a = virtual(Bar(), Bar) a = virtual(Bar(), Bar)
b = a.b() b = a.b()
@ -533,6 +547,7 @@ fn test_basic(source: &str, mapping: HashMap<&str, &str>, virtuals: &[(&str, &st
let mut env = TestEnvironment::new(); let mut env = TestEnvironment::new();
let id_to_name = std::mem::take(&mut env.id_to_name); let id_to_name = std::mem::take(&mut env.id_to_name);
let mut defined_identifiers: HashSet<_> = env.identifier_mapping.keys().cloned().collect(); let mut defined_identifiers: HashSet<_> = env.identifier_mapping.keys().cloned().collect();
defined_identifiers.insert("array".into());
defined_identifiers.insert("virtual".into()); defined_identifiers.insert("virtual".into());
let mut inferencer = env.get_inferencer(); let mut inferencer = env.get_inferencer();
inferencer.defined_identifiers = defined_identifiers.clone(); inferencer.defined_identifiers = defined_identifiers.clone();


@ -137,6 +137,13 @@ pub enum TypeEnum {
TList { TList {
ty: Type, ty: Type,
}, },
TNDArray {
ty: Type,
// We could introduce a more sensible limit for the number of dimensions
// and make this e.g. u8; usize for now to avoid some casts.
num_dims: usize,
},
TObj { TObj {
obj_id: DefinitionId, obj_id: DefinitionId,
fields: Mapping<StrRef, (Type, bool)>, fields: Mapping<StrRef, (Type, bool)>,
@ -156,6 +163,7 @@ impl TypeEnum {
TypeEnum::TVar { .. } => "TVar", TypeEnum::TVar { .. } => "TVar",
TypeEnum::TTuple { .. } => "TTuple", TypeEnum::TTuple { .. } => "TTuple",
TypeEnum::TList { .. } => "TList", TypeEnum::TList { .. } => "TList",
TypeEnum::TNDArray { .. } => "TNDArray",
TypeEnum::TObj { .. } => "TObj", TypeEnum::TObj { .. } => "TObj",
TypeEnum::TVirtual { .. } => "TVirtual", TypeEnum::TVirtual { .. } => "TVirtual",
TypeEnum::TCall { .. } => "TCall", TypeEnum::TCall { .. } => "TCall",
@ -173,7 +181,7 @@ pub struct Unifier {
pub(crate) calls: Vec<Rc<Call>>, pub(crate) calls: Vec<Rc<Call>>,
var_id: u32, var_id: u32,
unify_cache: HashSet<(Type, Type)>, unify_cache: HashSet<(Type, Type)>,
snapshot: Option<(usize, u32)> snapshot: Option<(usize, u32)>,
} }
impl Default for Unifier { impl Default for Unifier {
@ -387,6 +395,7 @@ impl Unifier {
TVar { .. } => allowed_typevars.iter().any(|b| self.unification_table.unioned(a, *b)), TVar { .. } => allowed_typevars.iter().any(|b| self.unification_table.unioned(a, *b)),
TCall { .. } => false, TCall { .. } => false,
TList { ty } => self.is_concrete(*ty, allowed_typevars), TList { ty } => self.is_concrete(*ty, allowed_typevars),
TNDArray { ty, .. } => self.is_concrete(*ty, allowed_typevars),
TTuple { ty } => ty.iter().all(|ty| self.is_concrete(*ty, allowed_typevars)), TTuple { ty } => ty.iter().all(|ty| self.is_concrete(*ty, allowed_typevars)),
TObj { params: vars, .. } => { TObj { params: vars, .. } => {
vars.values().all(|ty| self.is_concrete(*ty, allowed_typevars)) vars.values().all(|ty| self.is_concrete(*ty, allowed_typevars))
@ -459,10 +468,7 @@ impl Unifier {
if let Some(i) = required.iter().position(|v| v == k) { if let Some(i) = required.iter().position(|v| v == k) {
required.remove(i); required.remove(i);
} }
let i = all_names let i = all_names.iter().position(|v| &v.0 == k).ok_or_else(|| {
.iter()
.position(|v| &v.0 == k)
.ok_or_else(|| {
self.restore_snapshot(); self.restore_snapshot();
TypeError::new(TypeErrorKind::UnknownArgName(*k), *loc) TypeError::new(TypeErrorKind::UnknownArgName(*k), *loc)
})?; })?;
@ -652,17 +658,43 @@ impl Unifier {
self.unify_impl(x, b, false)?; self.unify_impl(x, b, false)?;
self.set_a_to_b(a, x); self.set_a_to_b(a, x);
} }
(TVar { fields: Some(fields), range, .. }, TNDArray { ty, num_dims }) => {
for (k, v) in fields.iter() {
match *k {
RecordKey::Int(_) => {
// .<n> is generated during generic scalar indexing lowering.
let indexed_ty = if *num_dims == 1 {
*ty
} else {
self.add_ty(TNDArray { ty: *ty, num_dims: *num_dims - 1 })
};
self.unify_impl(v.ty, indexed_ty, false).map_err(|e| e.at(v.loc))?
}
RecordKey::Str(_) => {
// xxx: Implement .shape here?
return Err(TypeError::new(TypeErrorKind::NoSuchField(*k, b), v.loc));
}
}
}
let x = self.check_var_compatibility(b, range)?.unwrap_or(b);
self.unify_impl(x, b, false)?;
self.set_a_to_b(a, x);
}
(TTuple { ty: ty1 }, TTuple { ty: ty2 }) => { (TTuple { ty: ty1 }, TTuple { ty: ty2 }) => {
if ty1.len() != ty2.len() { if ty1.len() != ty2.len() {
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None)); return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
} }
for (x, y) in ty1.iter().zip(ty2.iter()) { for (x, y) in ty1.iter().zip(ty2.iter()) {
self.unify_impl(*x, *y, false)?; if self.unify_impl(*x, *y, false).is_err() {
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
}
} }
self.set_a_to_b(a, b); self.set_a_to_b(a, b);
} }
(TList { ty: ty1 }, TList { ty: ty2 }) => { (TList { ty: ty1 }, TList { ty: ty2 }) => {
self.unify_impl(*ty1, *ty2, false)?; if self.unify_impl(*ty1, *ty2, false).is_err() {
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
}
self.set_a_to_b(a, b); self.set_a_to_b(a, b);
} }
(TVar { fields: Some(map), range, .. }, TObj { fields, .. }) => { (TVar { fields: Some(map), range, .. }, TObj { fields, .. }) => {
@ -743,12 +775,16 @@ impl Unifier {
self.incompatible_types(a, b)?; self.incompatible_types(a, b)?;
} }
for (x, y) in zip(params1.values(), params2.values()) { for (x, y) in zip(params1.values(), params2.values()) {
self.unify_impl(*x, *y, false)?; if self.unify_impl(*x, *y, false).is_err() {
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
};
} }
self.set_a_to_b(a, b); self.set_a_to_b(a, b);
} }
(TVirtual { ty: ty1 }, TVirtual { ty: ty2 }) => { (TVirtual { ty: ty1 }, TVirtual { ty: ty2 }) => {
self.unify_impl(*ty1, *ty2, false)?; if self.unify_impl(*ty1, *ty2, false).is_err() {
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
};
self.set_a_to_b(a, b); self.set_a_to_b(a, b);
} }
(TCall(calls1), TCall(calls2)) => { (TCall(calls1), TCall(calls2)) => {
@ -784,9 +820,13 @@ impl Unifier {
if x.name != y.name || x.default_value != y.default_value { if x.name != y.name || x.default_value != y.default_value {
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None)); return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
} }
self.unify_impl(x.ty, y.ty, false)?; if self.unify_impl(x.ty, y.ty, false).is_err() {
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
};
} }
self.unify_impl(sign1.ret, sign2.ret, false)?; if self.unify_impl(sign1.ret, sign2.ret, false).is_err() {
return Err(TypeError::new(TypeErrorKind::IncompatibleTypes(a, b), None));
};
self.set_a_to_b(a, b); self.set_a_to_b(a, b);
} }
(TVar { fields: Some(fields), .. }, _) => { (TVar { fields: Some(fields), .. }, _) => {
@ -895,6 +935,13 @@ impl Unifier {
TypeEnum::TList { ty } => { TypeEnum::TList { ty } => {
format!("list[{}]", self.internal_stringify(*ty, obj_to_name, var_to_name, notes)) format!("list[{}]", self.internal_stringify(*ty, obj_to_name, var_to_name, notes))
} }
TypeEnum::TNDArray { ty, num_dims } => {
format!(
"ndarray[{}, {}]",
self.internal_stringify(*ty, obj_to_name, var_to_name, notes),
num_dims
)
}
TypeEnum::TVirtual { ty } => { TypeEnum::TVirtual { ty } => {
format!( format!(
"virtual[{}]", "virtual[{}]",


@ -286,7 +286,7 @@ fn test_unify(
("v1", "tuple[int]"), ("v1", "tuple[int]"),
("v2", "tuple[float]"), ("v2", "tuple[float]"),
], ],
(("v1", "v2"), "Incompatible types: 0 and 1") (("v1", "v2"), "Incompatible types: tuple[0] and tuple[1]")
; "tuple parameter mismatch" ; "tuple parameter mismatch"
)] )]
#[test_case(2, #[test_case(2,
@ -478,7 +478,9 @@ fn test_typevar_range() {
let int_list = env.unifier.add_ty(TypeEnum::TList { ty: int }); let int_list = env.unifier.add_ty(TypeEnum::TList { ty: int });
assert_eq!( assert_eq!(
env.unify(a_list, int_list), env.unify(a_list, int_list),
Err("Expected any one of these types: 1, but got 0".into()) Err("Incompatible types: list[typevar22] and list[0]\
\n\nNotes:\n typevar22 {1}"
.into())
); );
let a = env.unifier.get_fresh_var_with_range(&[int, float], None, None).0; let a = env.unifier.get_fresh_var_with_range(&[int, float], None, None).0;
@ -506,7 +508,10 @@ fn test_rigid_var() {
assert_eq!(env.unify(a, b), Err("Incompatible types: typevar3 and typevar2".to_string())); assert_eq!(env.unify(a, b), Err("Incompatible types: typevar3 and typevar2".to_string()));
env.unifier.unify(list_a, list_x).unwrap(); env.unifier.unify(list_a, list_x).unwrap();
assert_eq!(env.unify(list_x, list_int), Err("Incompatible types: 0 and typevar2".to_string())); assert_eq!(
env.unify(list_x, list_int),
Err("Incompatible types: list[typevar2] and list[0]".to_string())
);
env.unifier.replace_rigid_var(a, int); env.unifier.replace_rigid_var(a, int);
env.unifier.unify(list_x, list_int).unwrap(); env.unifier.unify(list_x, list_int).unwrap();


@ -16,21 +16,10 @@ pub struct UnificationTable<V> {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
enum Action<V> { enum Action<V> {
Parent { Parent { key: usize, original_parent: usize },
key: usize, Value { key: usize, original_value: Option<V> },
original_parent: usize, Rank { key: usize, original_rank: u32 },
}, Marker { generation: u32 },
Value {
key: usize,
original_value: Option<V>,
},
Rank {
key: usize,
original_rank: u32,
},
Marker {
generation: u32,
}
} }
impl<V> Default for UnificationTable<V> { impl<V> Default for UnificationTable<V> {
@ -41,7 +30,13 @@ impl<V> Default for UnificationTable<V> {
impl<V> UnificationTable<V> { impl<V> UnificationTable<V> {
pub fn new() -> UnificationTable<V> { pub fn new() -> UnificationTable<V> {
UnificationTable { parents: Vec::new(), ranks: Vec::new(), values: Vec::new(), log: Vec::new(), generation: 0 } UnificationTable {
parents: Vec::new(),
ranks: Vec::new(),
values: Vec::new(),
log: Vec::new(),
generation: 0,
}
} }
pub fn new_key(&mut self, v: V) -> UnificationKey { pub fn new_key(&mut self, v: V) -> UnificationKey {
@ -125,7 +120,10 @@ impl<V> UnificationTable<V> {
pub fn restore_snapshot(&mut self, snapshot: (usize, u32)) { pub fn restore_snapshot(&mut self, snapshot: (usize, u32)) {
let (log_len, generation) = snapshot; let (log_len, generation) = snapshot;
assert!(self.log.len() >= log_len, "snapshot restoration error"); assert!(self.log.len() >= log_len, "snapshot restoration error");
assert!(matches!(self.log[log_len - 1], Action::Marker { generation: gen } if gen == generation), "snapshot restoration error"); assert!(
matches!(self.log[log_len - 1], Action::Marker { generation: gen } if gen == generation),
"snapshot restoration error"
);
for action in self.log.drain(log_len - 1..).rev() { for action in self.log.drain(log_len - 1..).rev() {
match action { match action {
Action::Parent { key, original_parent } => { Action::Parent { key, original_parent } => {
@ -145,7 +143,10 @@ impl<V> UnificationTable<V> {
pub fn discard_snapshot(&mut self, snapshot: (usize, u32)) { pub fn discard_snapshot(&mut self, snapshot: (usize, u32)) {
let (log_len, generation) = snapshot; let (log_len, generation) = snapshot;
assert!(self.log.len() >= log_len, "snapshot discard error"); assert!(self.log.len() >= log_len, "snapshot discard error");
assert!(matches!(self.log[log_len - 1], Action::Marker { generation: gen } if gen == generation), "snapshot discard error"); assert!(
matches!(self.log[log_len - 1], Action::Marker { generation: gen } if gen == generation),
"snapshot discard error"
);
self.log.clear(); self.log.clear();
} }
} }
@ -159,11 +160,23 @@ where
.enumerate() .enumerate()
.map(|(i, (v, p))| if *p == i { v.as_ref().map(|v| v.as_ref().clone()) } else { None }) .map(|(i, (v, p))| if *p == i { v.as_ref().map(|v| v.as_ref().clone()) } else { None })
.collect(); .collect();
UnificationTable { parents: self.parents.clone(), ranks: self.ranks.clone(), values, log: Vec::new(), generation: 0 } UnificationTable {
parents: self.parents.clone(),
ranks: self.ranks.clone(),
values,
log: Vec::new(),
generation: 0,
}
} }
pub fn from_send(table: &UnificationTable<V>) -> UnificationTable<Rc<V>> { pub fn from_send(table: &UnificationTable<V>) -> UnificationTable<Rc<V>> {
let values = table.values.iter().cloned().map(|v| v.map(Rc::new)).collect(); let values = table.values.iter().cloned().map(|v| v.map(Rc::new)).collect();
UnificationTable { parents: table.parents.clone(), ranks: table.ranks.clone(), values, log: Vec::new(), generation: 0 } UnificationTable {
parents: table.parents.clone(),
ranks: table.ranks.clone(),
values,
log: Vec::new(),
generation: 0,
}
} }
} }


@ -1,15 +1,15 @@
use lalrpop_util::ParseError;
use nac3ast::*;
use crate::ast::Ident; use crate::ast::Ident;
use crate::ast::Location; use crate::ast::Location;
use crate::token::Tok;
use crate::error::*; use crate::error::*;
use crate::token::Tok;
use lalrpop_util::ParseError;
use nac3ast::*;
pub fn make_config_comment( pub fn make_config_comment(
com_loc: Location, com_loc: Location,
stmt_loc: Location, stmt_loc: Location,
nac3com_above: Vec<(Ident, Tok)>, nac3com_above: Vec<(Ident, Tok)>,
nac3com_end: Option<Ident> nac3com_end: Option<Ident>,
) -> Result<Vec<Ident>, ParseError<Location, Tok, LexicalError>> { ) -> Result<Vec<Ident>, ParseError<Location, Tok, LexicalError>> {
if com_loc.column() != stmt_loc.column() && !nac3com_above.is_empty() { if com_loc.column() != stmt_loc.column() && !nac3com_above.is_empty() {
return Err(ParseError::User { return Err(ParseError::User {
@ -23,18 +23,21 @@ pub fn make_config_comment(
) )
) )
} }
}) });
}; };
Ok( Ok(nac3com_above
nac3com_above
.into_iter() .into_iter()
.map(|(com, _)| com) .map(|(com, _)| com)
.chain(nac3com_end.map_or_else(|| vec![].into_iter(), |com| vec![com].into_iter())) .chain(nac3com_end.map_or_else(|| vec![].into_iter(), |com| vec![com].into_iter()))
.collect() .collect())
)
} }
pub fn handle_small_stmt<U>(stmts: &mut [Stmt<U>], nac3com_above: Vec<(Ident, Tok)>, nac3com_end: Option<Ident>, com_above_loc: Location) -> Result<(), ParseError<Location, Tok, LexicalError>> { pub fn handle_small_stmt<U>(
stmts: &mut [Stmt<U>],
nac3com_above: Vec<(Ident, Tok)>,
nac3com_end: Option<Ident>,
com_above_loc: Location,
) -> Result<(), ParseError<Location, Tok, LexicalError>> {
if com_above_loc.column() != stmts[0].location.column() && !nac3com_above.is_empty() { if com_above_loc.column() != stmts[0].location.column() && !nac3com_above.is_empty() {
return Err(ParseError::User { return Err(ParseError::User {
error: LexicalError { error: LexicalError {
@ -47,17 +50,12 @@ pub fn handle_small_stmt<U>(stmts: &mut [Stmt<U>], nac3com_above: Vec<(Ident, To
) )
) )
} }
}) });
} }
apply_config_comments( apply_config_comments(&mut stmts[0], nac3com_above.into_iter().map(|(com, _)| com).collect());
&mut stmts[0],
nac3com_above
.into_iter()
.map(|(com, _)| com).collect()
);
apply_config_comments( apply_config_comments(
stmts.last_mut().unwrap(), stmts.last_mut().unwrap(),
nac3com_end.map_or_else(Vec::new, |com| vec![com]) nac3com_end.map_or_else(Vec::new, |com| vec![com]),
); );
Ok(()) Ok(())
} }
@ -80,6 +78,8 @@ fn apply_config_comments<U>(stmt: &mut Stmt<U>, comments: Vec<Ident>) {
| StmtKind::Nonlocal { config_comment, .. } | StmtKind::Nonlocal { config_comment, .. }
| StmtKind::Assert { config_comment, .. } => config_comment.extend(comments), | StmtKind::Assert { config_comment, .. } => config_comment.extend(comments),
_ => { unreachable!("only small statements should call this function") } _ => {
unreachable!("only small statements should call this function")
}
} }
} }


@ -145,35 +145,27 @@ impl From<LalrpopError<Location, Tok, LexicalError>> for ParseError {
fn from(err: LalrpopError<Location, Tok, LexicalError>) -> Self { fn from(err: LalrpopError<Location, Tok, LexicalError>) -> Self {
match err { match err {
// TODO: Are there cases where this isn't an EOF? // TODO: Are there cases where this isn't an EOF?
LalrpopError::InvalidToken { location } => ParseError { LalrpopError::InvalidToken { location } => {
error: ParseErrorType::Eof, ParseError { error: ParseErrorType::Eof, location }
location, }
}, LalrpopError::ExtraToken { token } => {
LalrpopError::ExtraToken { token } => ParseError { ParseError { error: ParseErrorType::ExtraToken(token.1), location: token.0 }
error: ParseErrorType::ExtraToken(token.1), }
location: token.0, LalrpopError::User { error } => {
}, ParseError { error: ParseErrorType::Lexical(error.error), location: error.location }
LalrpopError::User { error } => ParseError { }
error: ParseErrorType::Lexical(error.error),
location: error.location,
},
LalrpopError::UnrecognizedToken { token, expected } => { LalrpopError::UnrecognizedToken { token, expected } => {
// Hacky, but it's how CPython does it. See PyParser_AddToken, // Hacky, but it's how CPython does it. See PyParser_AddToken,
// in particular "Only one possible expected token" comment. // in particular "Only one possible expected token" comment.
let expected = if expected.len() == 1 { let expected = if expected.len() == 1 { Some(expected[0].clone()) } else { None };
Some(expected[0].clone())
} else {
None
};
ParseError { ParseError {
error: ParseErrorType::UnrecognizedToken(token.1, expected), error: ParseErrorType::UnrecognizedToken(token.1, expected),
location: token.0, location: token.0,
} }
} }
LalrpopError::UnrecognizedEOF { location, .. } => ParseError { LalrpopError::UnrecognizedEOF { location, .. } => {
error: ParseErrorType::Eof, ParseError { error: ParseErrorType::Eof, location }
location, }
},
} }
} }
} }


@ -15,10 +15,7 @@ struct FStringParser<'a> {
impl<'a> FStringParser<'a> { impl<'a> FStringParser<'a> {
fn new(source: &'a str, str_location: Location) -> Self { fn new(source: &'a str, str_location: Location) -> Self {
Self { Self { chars: source.chars().peekable(), str_location }
chars: source.chars().peekable(),
str_location,
}
} }
#[inline] #[inline]
@ -251,17 +248,11 @@ impl<'a> FStringParser<'a> {
} }
if !content.is_empty() { if !content.is_empty() {
values.push(self.expr(ExprKind::Constant { values.push(self.expr(ExprKind::Constant { value: content.into(), kind: None }))
value: content.into(),
kind: None,
}))
} }
let s = match values.len() { let s = match values.len() {
0 => self.expr(ExprKind::Constant { 0 => self.expr(ExprKind::Constant { value: String::new().into(), kind: None }),
value: String::new().into(),
kind: None,
}),
1 => values.into_iter().next().unwrap(), 1 => values.into_iter().next().unwrap(),
_ => self.expr(ExprKind::JoinedStr { values }), _ => self.expr(ExprKind::JoinedStr { values }),
}; };
@ -277,9 +268,7 @@ fn parse_fstring_expr(source: &str) -> Result<Expr, ParseError> {
/// Parse an fstring from a string, located at a certain position in the sourcecode. /// Parse an fstring from a string, located at a certain position in the sourcecode.
/// In case of errors, we will get the location and the error returned. /// In case of errors, we will get the location and the error returned.
pub fn parse_located_fstring(source: &str, location: Location) -> Result<Expr, FStringError> { pub fn parse_located_fstring(source: &str, location: Location) -> Result<Expr, FStringError> {
FStringParser::new(source, location) FStringParser::new(source, location).parse().map_err(|error| FStringError { error, location })
.parse()
.map_err(|error| FStringError { error, location })
} }
#[cfg(test)] #[cfg(test)]


@ -69,10 +69,7 @@ pub fn parse_args(func_args: Vec<FunctionArgument>) -> Result<ArgumentList, Lexi
keywords.push(ast::Keyword::new( keywords.push(ast::Keyword::new(
location, location,
ast::KeywordData { ast::KeywordData { arg: name.map(|name| name.into()), value: Box::new(value) },
arg: name.map(|name| name.into()),
value: Box::new(value),
},
)); ));
} }
None => { None => {


@ -3,12 +3,12 @@
//! This means source code is translated into separate tokens. //! This means source code is translated into separate tokens.
pub use super::token::Tok; pub use super::token::Tok;
use crate::ast::{Location, FileName}; use crate::ast::{FileName, Location};
use crate::error::{LexicalError, LexicalErrorType}; use crate::error::{LexicalError, LexicalErrorType};
use std::char; use std::char;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::str::FromStr;
use std::num::IntErrorKind; use std::num::IntErrorKind;
use std::str::FromStr;
use unic_emoji_char::is_emoji_presentation; use unic_emoji_char::is_emoji_presentation;
use unic_ucd_ident::{is_xid_continue, is_xid_start}; use unic_ucd_ident::{is_xid_continue, is_xid_start};
@ -32,20 +32,14 @@ impl IndentationLevel {
if self.spaces <= other.spaces { if self.spaces <= other.spaces {
Ok(Ordering::Less) Ok(Ordering::Less)
} else { } else {
Err(LexicalError { Err(LexicalError { location, error: LexicalErrorType::TabError })
location,
error: LexicalErrorType::TabError,
})
} }
} }
Ordering::Greater => { Ordering::Greater => {
if self.spaces >= other.spaces { if self.spaces >= other.spaces {
Ok(Ordering::Greater) Ok(Ordering::Greater)
} else { } else {
Err(LexicalError { Err(LexicalError { location, error: LexicalErrorType::TabError })
location,
error: LexicalErrorType::TabError,
})
} }
} }
Ordering::Equal => Ok(self.spaces.cmp(&other.spaces)), Ordering::Equal => Ok(self.spaces.cmp(&other.spaces)),
@ -63,7 +57,7 @@ pub struct Lexer<T: Iterator<Item = char>> {
chr1: Option<char>, chr1: Option<char>,
chr2: Option<char>, chr2: Option<char>,
location: Location, location: Location,
config_comment_prefix: Option<&'static str> config_comment_prefix: Option<&'static str>,
} }
pub static KEYWORDS: phf::Map<&'static str, Tok> = phf::phf_map! { pub static KEYWORDS: phf::Map<&'static str, Tok> = phf::phf_map! {
@ -136,11 +130,7 @@ where
T: Iterator<Item = char>, T: Iterator<Item = char>,
{ {
pub fn new(source: T) -> Self { pub fn new(source: T) -> Self {
let mut nlh = NewlineHandler { let mut nlh = NewlineHandler { source, chr0: None, chr1: None };
source,
chr0: None,
chr1: None,
};
nlh.shift(); nlh.shift();
nlh.shift(); nlh.shift();
nlh nlh
@ -195,7 +185,7 @@ where
location: start, location: start,
chr1: None, chr1: None,
chr2: None, chr2: None,
config_comment_prefix: Some(" nac3:") config_comment_prefix: Some(" nac3:"),
}; };
lxr.next_char(); lxr.next_char();
lxr.next_char(); lxr.next_char();
@ -287,15 +277,15 @@ where
let end_pos = self.get_pos(); let end_pos = self.get_pos();
let value = match i128::from_str_radix(&value_text, radix) { let value = match i128::from_str_radix(&value_text, radix) {
Ok(value) => value, Ok(value) => value,
Err(e) => { Err(e) => match e.kind() {
match e.kind() {
IntErrorKind::PosOverflow | IntErrorKind::NegOverflow => i128::MAX, IntErrorKind::PosOverflow | IntErrorKind::NegOverflow => i128::MAX,
_ => return Err(LexicalError { _ => {
return Err(LexicalError {
error: LexicalErrorType::OtherError(format!("{:?}", e)), error: LexicalErrorType::OtherError(format!("{:?}", e)),
location: start_pos, location: start_pos,
}), })
}
} }
},
}; };
Ok((start_pos, Tok::Int { value }, end_pos)) Ok((start_pos, Tok::Int { value }, end_pos))
} }
@ -338,14 +328,7 @@ where
if self.chr0 == Some('j') || self.chr0 == Some('J') { if self.chr0 == Some('j') || self.chr0 == Some('J') {
self.next_char(); self.next_char();
let end_pos = self.get_pos(); let end_pos = self.get_pos();
Ok(( Ok((start_pos, Tok::Complex { real: 0.0, imag: value }, end_pos))
start_pos,
Tok::Complex {
real: 0.0,
imag: value,
},
end_pos,
))
} else { } else {
let end_pos = self.get_pos(); let end_pos = self.get_pos();
Ok((start_pos, Tok::Float { value }, end_pos)) Ok((start_pos, Tok::Float { value }, end_pos))
@ -364,7 +347,7 @@ where
let value = value_text.parse::<i128>().ok(); let value = value_text.parse::<i128>().ok();
let nonzero = match value { let nonzero = match value {
Some(value) => value != 0i128, Some(value) => value != 0i128,
None => true None => true,
}; };
if start_is_zero && nonzero { if start_is_zero && nonzero {
return Err(LexicalError { return Err(LexicalError {
@ -433,9 +416,8 @@ where
fn lex_comment(&mut self) -> Option<Spanned> { fn lex_comment(&mut self) -> Option<Spanned> {
self.next_char(); self.next_char();
// if possibly nac3 pseudocomment, special handling for `# nac3:` // if possibly nac3 pseudocomment, special handling for `# nac3:`
let (mut prefix, mut is_comment) = self let (mut prefix, mut is_comment) =
.config_comment_prefix self.config_comment_prefix.map_or_else(|| ("".chars(), false), |v| (v.chars(), true));
.map_or_else(|| ("".chars(), false), |v| (v.chars(), true));
// for the correct location of config comment // for the correct location of config comment
let mut start_loc = self.location; let mut start_loc = self.location;
start_loc.go_left(); start_loc.go_left();
@ -460,22 +442,20 @@ where
return Some(( return Some((
start_loc, start_loc,
Tok::ConfigComment { content: content.trim().into() }, Tok::ConfigComment { content: content.trim().into() },
self.location self.location,
)); ));
} }
} }
} }
} }
self.next_char(); self.next_char();
}; }
} }
fn unicode_literal(&mut self, literal_number: usize) -> Result<char, LexicalError> { fn unicode_literal(&mut self, literal_number: usize) -> Result<char, LexicalError> {
let mut p: u32 = 0u32; let mut p: u32 = 0u32;
let unicode_error = LexicalError { let unicode_error =
error: LexicalErrorType::UnicodeError, LexicalError { error: LexicalErrorType::UnicodeError, location: self.get_pos() };
location: self.get_pos(),
};
for i in 1..=literal_number { for i in 1..=literal_number {
match self.next_char() { match self.next_char() {
Some(c) => match c.to_digit(16) { Some(c) => match c.to_digit(16) {
@ -530,10 +510,8 @@ where
} }
} }
} }
unicode_names2::character(&name).ok_or(LexicalError { unicode_names2::character(&name)
error: LexicalErrorType::UnicodeError, .ok_or(LexicalError { error: LexicalErrorType::UnicodeError, location: start_pos })
location: start_pos,
})
} }
fn lex_string( fn lex_string(
@ -650,14 +628,9 @@ where
let end_pos = self.get_pos(); let end_pos = self.get_pos();
let tok = if is_bytes { let tok = if is_bytes {
Tok::Bytes { Tok::Bytes { value: string_content.chars().map(|c| c as u8).collect() }
value: string_content.chars().map(|c| c as u8).collect(),
}
} else { } else {
Tok::String { Tok::String { value: string_content, is_fstring }
value: string_content,
is_fstring,
}
}; };
Ok((start_pos, tok, end_pos)) Ok((start_pos, tok, end_pos))
@ -842,11 +815,7 @@ where
let tok_start = self.get_pos(); let tok_start = self.get_pos();
self.next_char(); self.next_char();
let tok_end = self.get_pos(); let tok_end = self.get_pos();
self.emit(( self.emit((tok_start, Tok::Name { name: c.to_string().into() }, tok_end));
tok_start,
Tok::Name { name: c.to_string().into() },
tok_end,
));
} else { } else {
self.consume_character(c)?; self.consume_character(c)?;
} }
@ -1439,14 +1408,8 @@ class Foo(A, B):
assert_eq!( assert_eq!(
tokens, tokens,
vec![ vec![
Tok::String { Tok::String { value: "\\\\".to_owned(), is_fstring: false },
value: "\\\\".to_owned(), Tok::String { value: "\\".to_owned(), is_fstring: false },
is_fstring: false,
},
Tok::String {
value: "\\".to_owned(),
is_fstring: false,
},
Tok::Newline, Tok::Newline,
] ]
); );
@ -1459,27 +1422,13 @@ class Foo(A, B):
assert_eq!( assert_eq!(
tokens, tokens,
vec![ vec![
Tok::Int { Tok::Int { value: 47i128 },
value: 47i128, Tok::Int { value: 13i128 },
}, Tok::Int { value: 0i128 },
Tok::Int { Tok::Int { value: 123i128 },
value: 13i128,
},
Tok::Int {
value: 0i128,
},
Tok::Int {
value: 123i128,
},
Tok::Float { value: 0.2 }, Tok::Float { value: 0.2 },
Tok::Complex { Tok::Complex { real: 0.0, imag: 2.0 },
real: 0.0, Tok::Complex { real: 0.0, imag: 2.2 },
imag: 2.0,
},
Tok::Complex {
real: 0.0,
imag: 2.2,
},
Tok::Newline, Tok::Newline,
] ]
); );
@ -1539,21 +1488,13 @@ class Foo(A, B):
assert_eq!( assert_eq!(
tokens, tokens,
vec![ vec![
Tok::Name { Tok::Name { name: String::from("avariable").into() },
name: String::from("avariable").into(),
},
Tok::Equal, Tok::Equal,
Tok::Int { Tok::Int { value: 99i128 },
value: 99i128
},
Tok::Plus, Tok::Plus,
Tok::Int { Tok::Int { value: 2i128 },
value: 2i128
},
Tok::Minus, Tok::Minus,
Tok::Int { Tok::Int { value: 0i128 },
value: 0i128
},
Tok::Newline, Tok::Newline,
] ]
); );
@ -1740,42 +1681,15 @@ class Foo(A, B):
assert_eq!( assert_eq!(
tokens, tokens,
vec![ vec![
Tok::String { Tok::String { value: String::from("double"), is_fstring: false },
value: String::from("double"), Tok::String { value: String::from("single"), is_fstring: false },
is_fstring: false, Tok::String { value: String::from("can't"), is_fstring: false },
}, Tok::String { value: String::from("\\\""), is_fstring: false },
Tok::String { Tok::String { value: String::from("\t\r\n"), is_fstring: false },
value: String::from("single"), Tok::String { value: String::from("\\g"), is_fstring: false },
is_fstring: false, Tok::String { value: String::from("raw\\'"), is_fstring: false },
}, Tok::String { value: String::from("Đ"), is_fstring: false },
Tok::String { Tok::String { value: String::from("\u{80}\u{0}a"), is_fstring: false },
value: String::from("can't"),
is_fstring: false,
},
Tok::String {
value: String::from("\\\""),
is_fstring: false,
},
Tok::String {
value: String::from("\t\r\n"),
is_fstring: false,
},
Tok::String {
value: String::from("\\g"),
is_fstring: false,
},
Tok::String {
value: String::from("raw\\'"),
is_fstring: false,
},
Tok::String {
value: String::from("Đ"),
is_fstring: false,
},
Tok::String {
value: String::from("\u{80}\u{0}a"),
is_fstring: false,
},
Tok::Newline, Tok::Newline,
] ]
); );
@ -1840,41 +1754,17 @@ class Foo(A, B):
fn test_raw_byte_literal() { fn test_raw_byte_literal() {
let source = r"rb'\x1z'"; let source = r"rb'\x1z'";
let tokens = lex_source(source); let tokens = lex_source(source);
assert_eq!( assert_eq!(tokens, vec![Tok::Bytes { value: b"\\x1z".to_vec() }, Tok::Newline]);
tokens,
vec![
Tok::Bytes {
value: b"\\x1z".to_vec()
},
Tok::Newline
]
);
let source = r"rb'\\'"; let source = r"rb'\\'";
let tokens = lex_source(source); let tokens = lex_source(source);
assert_eq!( assert_eq!(tokens, vec![Tok::Bytes { value: b"\\\\".to_vec() }, Tok::Newline])
tokens,
vec![
Tok::Bytes {
value: b"\\\\".to_vec()
},
Tok::Newline
]
)
} }
#[test] #[test]
fn test_escape_octet() { fn test_escape_octet() {
let source = r##"b'\43a\4\1234'"##; let source = r##"b'\43a\4\1234'"##;
let tokens = lex_source(source); let tokens = lex_source(source);
assert_eq!( assert_eq!(tokens, vec![Tok::Bytes { value: b"#a\x04S4".to_vec() }, Tok::Newline])
tokens,
vec![
Tok::Bytes {
value: b"#a\x04S4".to_vec()
},
Tok::Newline
]
)
} }
#[test] #[test]
@ -1883,13 +1773,7 @@ class Foo(A, B):
let tokens = lex_source(source); let tokens = lex_source(source);
assert_eq!( assert_eq!(
tokens, tokens,
vec![ vec![Tok::String { value: "\u{2002}".to_owned(), is_fstring: false }, Tok::Newline]
Tok::String {
value: "\u{2002}".to_owned(),
is_fstring: false,
},
Tok::Newline
]
) )
} }
} }


@ -31,5 +31,5 @@ lalrpop_mod!(
#[allow(unused)] #[allow(unused)]
python python
); );
pub mod token;
pub mod config_comment_helper; pub mod config_comment_helper;
pub mod token;


@ -75,9 +75,7 @@ pub fn parse(source: &str, mode: Mode, file: FileName) -> Result<ast::Mod, Parse
let marker_token = (Default::default(), mode.to_marker(), Default::default()); let marker_token = (Default::default(), mode.to_marker(), Default::default());
let tokenizer = iter::once(Ok(marker_token)).chain(lxr); let tokenizer = iter::once(Ok(marker_token)).chain(lxr);
python::TopParser::new() python::TopParser::new().parse(tokenizer).map_err(ParseError::from)
.parse(tokenizer)
.map_err(ParseError::from)
} }
#[cfg(test)] #[cfg(test)]


@ -1,7 +1,7 @@
//! Different token definitions. //! Different token definitions.
//! Loosely based on token.h from CPython source: //! Loosely based on token.h from CPython source:
use std::fmt::{self, Write};
use crate::ast; use crate::ast;
use std::fmt::{self, Write};
/// Python source code can be tokenized in a sequence of these tokens. /// Python source code can be tokenized in a sequence of these tokens.
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
@ -111,8 +111,16 @@ impl fmt::Display for Tok {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use Tok::*; use Tok::*;
match self { match self {
Name { name } => write!(f, "'{}'", ast::get_str_from_ref(&ast::get_str_ref_lock(), *name)), Name { name } => {
Int { value } => if *value != i128::MAX { write!(f, "'{}'", value) } else { write!(f, "'#OFL#'") }, write!(f, "'{}'", ast::get_str_from_ref(&ast::get_str_ref_lock(), *name))
}
Int { value } => {
if *value != i128::MAX {
write!(f, "'{}'", value)
} else {
write!(f, "'#OFL#'")
}
}
Float { value } => write!(f, "'{}'", value), Float { value } => write!(f, "'{}'", value),
Complex { real, imag } => write!(f, "{}j{}", real, imag), Complex { real, imag } => write!(f, "{}j{}", real, imag),
String { value, is_fstring } => { String { value, is_fstring } => {
@ -134,7 +142,11 @@ impl fmt::Display for Tok {
} }
f.write_str("\"") f.write_str("\"")
} }
ConfigComment { content } => write!(f, "ConfigComment: '{}'", ast::get_str_from_ref(&ast::get_str_ref_lock(), *content)), ConfigComment { content } => write!(
f,
"ConfigComment: '{}'",
ast::get_str_from_ref(&ast::get_str_ref_lock(), *content)
),
Newline => f.write_str("Newline"), Newline => f.write_str("Newline"),
Indent => f.write_str("Indent"), Indent => f.write_str("Indent"),
Dedent => f.write_str("Dedent"), Dedent => f.write_str("Dedent"),


@ -10,6 +10,7 @@ nac3parser = { path = "../nac3parser" }
nac3core = { path = "../nac3core" } nac3core = { path = "../nac3core" }
[dependencies.inkwell] [dependencies.inkwell]
version = "0.1.0-beta.4" git = "https://github.com/nbaksalyar/inkwell.git"
branch = "llvm14"
default-features = false default-features = false
features = ["llvm13-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"] features = ["llvm14-0", "target-aarch64", "target-arm", "target-riscv", "no-libffi-linking"]


@ -0,0 +1,257 @@
from __future__ import annotations
@extern
def output_int32(x: int32):
...
@extern
def output_uint32(x: uint32):
...
@extern
def output_int64(x: int64):
...
@extern
def output_uint64(x: uint64):
...
@extern
def output_float64(x: float):
...
def run() -> int32:
test_int32()
test_uint32()
test_int64()
test_uint64()
test_A()
test_B()
return 0
def test_int32():
a = 17
b = 3
output_int32(a + b)
output_int32(a - b)
output_int32(a * b)
output_int32(a // b)
output_int32(a % b)
output_int32(a | b)
output_int32(a ^ b)
output_int32(a & b)
output_int32(a << b)
output_int32(a >> b)
output_float64(a / b)
a += b
output_int32(a)
a -= b
output_int32(a)
a *= b
output_int32(a)
a //= b
output_int32(a)
a %= b
output_int32(a)
a |= b
output_int32(a)
a ^= b
output_int32(a)
a &= b
output_int32(a)
a <<= b
output_int32(a)
a >>= b
output_int32(a)
# fail because (a / b) is float
# a /= b
def test_uint32():
a = uint32(17)
b = uint32(3)
output_uint32(a + b)
output_uint32(a - b)
output_uint32(a * b)
output_uint32(a // b)
output_uint32(a % b)
output_uint32(a | b)
output_uint32(a ^ b)
output_uint32(a & b)
output_uint32(a << b)
output_uint32(a >> b)
output_float64(a / b)
a += b
output_uint32(a)
a -= b
output_uint32(a)
a *= b
output_uint32(a)
a //= b
output_uint32(a)
a %= b
output_uint32(a)
a |= b
output_uint32(a)
a ^= b
output_uint32(a)
a &= b
output_uint32(a)
a <<= b
output_uint32(a)
a >>= b
output_uint32(a)
def test_int64():
a = int64(17)
b = int64(3)
output_int64(a + b)
output_int64(a - b)
output_int64(a * b)
output_int64(a // b)
output_int64(a % b)
output_int64(a | b)
output_int64(a ^ b)
output_int64(a & b)
output_int64(a << b)
output_int64(a >> b)
output_float64(a / b)
a += b
output_int64(a)
a -= b
output_int64(a)
a *= b
output_int64(a)
a //= b
output_int64(a)
a %= b
output_int64(a)
a |= b
output_int64(a)
a ^= b
output_int64(a)
a &= b
output_int64(a)
a <<= b
output_int64(a)
a >>= b
output_int64(a)
def test_uint64():
a = uint64(17)
b = uint64(3)
output_uint64(a + b)
output_uint64(a - b)
output_uint64(a * b)
output_uint64(a // b)
output_uint64(a % b)
output_uint64(a | b)
output_uint64(a ^ b)
output_uint64(a & b)
output_uint64(a << b)
output_uint64(a >> b)
output_float64(a / b)
a += b
output_uint64(a)
a -= b
output_uint64(a)
a *= b
output_uint64(a)
a //= b
output_uint64(a)
a %= b
output_uint64(a)
a |= b
output_uint64(a)
a ^= b
output_uint64(a)
a &= b
output_uint64(a)
a <<= b
output_uint64(a)
a >>= b
output_uint64(a)
class A:
a: int32
def __init__(self, a: int32):
self.a = a
def __add__(self, other: A) -> A:
output_int32(self.a + other.a)
return A(self.a + other.a)
def __sub__(self, other: A) -> A:
output_int32(self.a - other.a)
return A(self.a - other.a)
def test_A():
a = A(17)
b = A(3)
c = a + b
# fail due to alloca in __add__ function
# output_int32(c.a)
a += b
# fail due to alloca in __add__ function
# output_int32(a.a)
a = A(17)
b = A(3)
d = a - b
# fail due to alloca in __sub__ function
# output_int32(d.a)
a -= b
# fail due to alloca in __sub__ function
# output_int32(a.a)
a = A(17)
b = A(3)
a.__add__(b)
a.__sub__(b)
class B:
a: int32
def __init__(self, a: int32):
self.a = a
def __add__(self, other: B) -> B:
output_int32(self.a + other.a)
return B(self.a + other.a)
def __sub__(self, other: B) -> B:
output_int32(self.a - other.a)
return B(self.a - other.a)
def __iadd__(self, other: B) -> B:
output_int32(self.a + other.a + 24)
return B(self.a + other.a + 24)
def __isub__(self, other: B) -> B:
output_int32(self.a - other.a - 24)
return B(self.a - other.a - 24)
def test_B():
a = B(17)
b = B(3)
c = a + b
# fail due to alloca in __add__ function
# output_int32(c.a)
a += b
# fail due to alloca in __iadd__ function
# output_int32(a.a)
a = B(17)
b = B(3)
d = a - b
# fail due to alloca in __sub__ function
# output_int32(d.a)
a -= b
# fail due to alloca in __isub__ function
# output_int32(a.a)
a = B(17)
b = B(3)
a.__add__(b)
a.__sub__(b)
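The A and B test classes above exercise Python's augmented-assignment dispatch rule: a += b uses __iadd__ when the class defines it and falls back to __add__ otherwise (and likewise -=, __isub__, __sub__). That is why test_A sees the plain sums for both + and +=, while test_B gets the extra 24 from its in-place methods. A minimal plain-CPython sketch of that rule (illustrative only, not NAC3 test code; the class names are made up for this example):

class OnlyAdd:
    def __init__(self, a):
        self.a = a
    def __add__(self, other):
        return OnlyAdd(self.a + other.a)

class AlsoIAdd(OnlyAdd):
    def __iadd__(self, other):
        return AlsoIAdd(self.a + other.a + 24)

x = OnlyAdd(17)
x += OnlyAdd(3)   # no __iadd__ defined, so this falls back to __add__
print(x.a)        # 20

y = AlsoIAdd(17)
y += AlsoIAdd(3)  # __iadd__ takes precedence over __add__
print(y.a)        # 44

Under CPython this prints 20 and then 44.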

View File

@ -30,46 +30,13 @@ use nac3parser::{
mod basic_symbol_resolver; mod basic_symbol_resolver;
use basic_symbol_resolver::*; use basic_symbol_resolver::*;
fn main() { fn handle_typevar_definition(
let file_name = env::args().nth(1).unwrap();
let threads: u32 = env::args().nth(2).map(|s| str::parse(&s).unwrap()).unwrap_or(1);
Target::initialize_all(&InitializationConfig::default());
let program = match fs::read_to_string(file_name.clone()) {
Ok(program) => program,
Err(err) => {
println!("Cannot open input file: {}", err);
return;
}
};
let primitive: PrimitiveStore = TopLevelComposer::make_primitives().0;
let (mut composer, builtins_def, builtins_ty) =
TopLevelComposer::new(vec![], Default::default());
let internal_resolver: Arc<ResolverInternal> = ResolverInternal {
id_to_type: builtins_ty.into(),
id_to_def: builtins_def.into(),
class_names: Default::default(),
module_globals: Default::default(),
str_store: Default::default(),
}
.into();
let resolver =
Arc::new(Resolver(internal_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;
let parser_result = parser::parse_program(&program, file_name.into()).unwrap();
for stmt in parser_result.into_iter() {
if let StmtKind::Assign { targets, value, .. } = &stmt.node {
fn handle_typevar_definition(
var: &Expr, var: &Expr,
resolver: &(dyn SymbolResolver + Send + Sync), resolver: &(dyn SymbolResolver + Send + Sync),
def_list: &[Arc<RwLock<TopLevelDef>>], def_list: &[Arc<RwLock<TopLevelDef>>],
unifier: &mut Unifier, unifier: &mut Unifier,
primitives: &PrimitiveStore, primitives: &PrimitiveStore,
) -> Result<Type, String> { ) -> Result<Type, String> {
if let ExprKind::Call { func, args, .. } = &var.node { if let ExprKind::Call { func, args, .. } = &var.node {
if matches!(&func.node, ExprKind::Name { id, .. } if id == &"TypeVar".into()) { if matches!(&func.node, ExprKind::Name { id, .. } if id == &"TypeVar".into()) {
let constraints = args let constraints = args
@ -85,26 +52,20 @@ fn main() {
Default::default(), Default::default(),
)?; )?;
get_type_from_type_annotation_kinds( get_type_from_type_annotation_kinds(
def_list, unifier, primitives, &ty, &mut None def_list, unifier, primitives, &ty, &mut None,
) )
}) })
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
Ok(unifier.get_fresh_var_with_range(&constraints, None, None).0) Ok(unifier.get_fresh_var_with_range(&constraints, None, None).0)
} else { } else {
Err(format!( Err(format!("expression {:?} cannot be handled as a TypeVar in global scope", var))
"expression {:?} cannot be handled as a TypeVar in global scope",
var
))
} }
} else { } else {
Err(format!( Err(format!("expression {:?} cannot be handled as a TypeVar in global scope", var))
"expression {:?} cannot be handled as a TypeVar in global scope",
var
))
}
} }
}
fn handle_assignment_pattern( fn handle_assignment_pattern(
targets: &[Expr], targets: &[Expr],
value: &Expr, value: &Expr,
resolver: &(dyn SymbolResolver + Send + Sync), resolver: &(dyn SymbolResolver + Send + Sync),
@ -112,7 +73,7 @@ fn main() {
def_list: &[Arc<RwLock<TopLevelDef>>], def_list: &[Arc<RwLock<TopLevelDef>>],
unifier: &mut Unifier, unifier: &mut Unifier,
primitives: &PrimitiveStore, primitives: &PrimitiveStore,
) -> Result<(), String> { ) -> Result<(), String> {
if targets.len() == 1 { if targets.len() == 1 {
match &targets[0].node { match &targets[0].node {
ExprKind::Name { id, .. } => { ExprKind::Name { id, .. } => {
@ -125,15 +86,13 @@ fn main() {
) { ) {
internal_resolver.add_id_type(*id, var); internal_resolver.add_id_type(*id, var);
Ok(()) Ok(())
} else if let Ok(val) = } else if let Ok(val) = parse_parameter_default_value(value.borrow(), resolver) {
parse_parameter_default_value(value.borrow(), resolver)
{
internal_resolver.add_module_global(*id, val); internal_resolver.add_module_global(*id, val);
Ok(()) Ok(())
} else { } else {
Err(format!("fails to evaluate this expression `{:?}` as a constant or TypeVar at {}", Err(format!(
targets[0].node, "fails to evaluate this expression `{:?}` as a constant or TypeVar at {}",
targets[0].location, targets[0].node, targets[0].location,
)) ))
} }
} }
@ -179,14 +138,45 @@ fn main() {
Ok(()) Ok(())
} }
} }
_ => Err(format!( _ => Err(format!("unpack of this expression is not supported at {}", value.location)),
"unpack of this expression is not supported at {}",
value.location
)),
}
} }
} }
}
fn main() {
let file_name = env::args().nth(1).unwrap();
let threads: u32 = env::args().nth(2).map(|s| str::parse(&s).unwrap()).unwrap_or(1);
Target::initialize_all(&InitializationConfig::default());
let program = match fs::read_to_string(file_name.clone()) {
Ok(program) => program,
Err(err) => {
println!("Cannot open input file: {}", err);
return;
}
};
let primitive: PrimitiveStore = TopLevelComposer::make_primitives().0;
let (mut composer, builtins_def, builtins_ty) =
TopLevelComposer::new(vec![], Default::default());
let internal_resolver: Arc<ResolverInternal> = ResolverInternal {
id_to_type: builtins_ty.into(),
id_to_def: builtins_def.into(),
class_names: Default::default(),
module_globals: Default::default(),
str_store: Default::default(),
}
.into();
let resolver =
Arc::new(Resolver(internal_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;
let parser_result = parser::parse_program(&program, file_name.into()).unwrap();
for stmt in parser_result.into_iter() {
match &stmt.node {
StmtKind::Assign { targets, value, .. } => {
let def_list = composer.extract_def_list(); let def_list = composer.extract_def_list();
let unifier = &mut composer.unifier; let unifier = &mut composer.unifier;
let primitives = &composer.primitives_ty; let primitives = &composer.primitives_ty;
@ -202,17 +192,26 @@ fn main() {
eprintln!("{}", err); eprintln!("{}", err);
return; return;
} }
continue;
} }
// allow (and ignore) "from __future__ import annotations"
let (name, def_id, ty) = StmtKind::ImportFrom { module, names, .. }
composer.register_top_level(stmt, Some(resolver.clone()), "__main__".into()).unwrap(); if module == &Some("__future__".into())
&& names.len() == 1
&& names[0].name == "annotations".into() =>
{
()
}
_ => {
let (name, def_id, ty) = composer
.register_top_level(stmt, Some(resolver.clone()), "__main__".into())
.unwrap();
internal_resolver.add_id_def(name, def_id); internal_resolver.add_id_def(name, def_id);
if let Some(ty) = ty { if let Some(ty) = ty {
internal_resolver.add_id_type(name, ty); internal_resolver.add_id_type(name, ty);
} }
} }
}
}
let signature = FunSignature { args: vec![], ret: primitive.int32, vars: HashMap::new() }; let signature = FunSignature { args: vec![], ret: primitive.int32, vars: HashMap::new() };
let mut store = ConcreteTypeStore::new(); let mut store = ConcreteTypeStore::new();

View File

@ -8,7 +8,6 @@
, ncurses , ncurses
, zlib , zlib
, which , which
, llvmPackages_13
, debugVersion ? false , debugVersion ? false
, enableManpages ? false , enableManpages ? false
, enableSharedLibraries ? false , enableSharedLibraries ? false
@ -18,7 +17,7 @@
let let
inherit (lib) optional optionals optionalString; inherit (lib) optional optionals optionalString;
release_version = "13.0.1"; release_version = "14.0.1";
candidate = ""; # empty or "rcN" candidate = ""; # empty or "rcN"
dash-candidate = lib.optionalString (candidate != "") "-${candidate}"; dash-candidate = lib.optionalString (candidate != "") "-${candidate}";
version = "${release_version}${dash-candidate}"; # differentiating these (variables) is important for RCs version = "${release_version}${dash-candidate}"; # differentiating these (variables) is important for RCs
@ -35,7 +34,7 @@ in stdenv.mkDerivation (rec {
pname = "llvm"; pname = "llvm";
inherit version; inherit version;
src = fetch pname "sha256-7GuA2Cw4SsrS3BkpA6bPLNuv+4ibhL+5janXHmMPyDQ="; src = fetch pname "sha256-W4kBfewnKTEasUNALwPaHeptDHndXHAbyTnPizTwHsI=";
unpackPhase = '' unpackPhase = ''
unpackFile $src unpackFile $src
@ -50,19 +49,12 @@ in stdenv.mkDerivation (rec {
buildInputs = [ ]; buildInputs = [ ];
propagatedBuildInputs = optionals (stdenv.buildPlatform == stdenv.hostPlatform) [ ncurses ] propagatedBuildInputs = [ ncurses zlib ];
++ [ zlib ];
checkInputs = [ which ]; checkInputs = [ which ];
patches = [ patches = [
./gnu-install-dirs.patch ./gnu-install-dirs.patch
# Fix random compiler crashes: https://bugs.llvm.org/show_bug.cgi?id=50611
(fetchpatch {
url = "https://raw.githubusercontent.com/archlinux/svntogit-packages/4764a4f8c920912a2bfd8b0eea57273acfe0d8a8/trunk/no-strict-aliasing-DwarfCompileUnit.patch";
sha256 = "18l6mrvm2vmwm77ckcnbjvh6ybvn72rhrb799d4qzwac4x2ifl7g";
stripLen = 1;
})
./llvm-future-riscv-abi.diff ./llvm-future-riscv-abi.diff
]; ];
@ -121,7 +113,9 @@ in stdenv.mkDerivation (rec {
"-DLLVM_DEFAULT_TARGET_TRIPLE=${stdenv.hostPlatform.config}" "-DLLVM_DEFAULT_TARGET_TRIPLE=${stdenv.hostPlatform.config}"
"-DLLVM_ENABLE_UNWIND_TABLES=OFF" "-DLLVM_ENABLE_UNWIND_TABLES=OFF"
"-DLLVM_ENABLE_THREADS=OFF" "-DLLVM_ENABLE_THREADS=OFF"
"-DLLVM_TARGETS_TO_BUILD=X86;ARM;RISCV" "-DLLVM_INCLUDE_BENCHMARKS=OFF"
"-DLLVM_BUILD_TOOLS=OFF"
"-DLLVM_TARGETS_TO_BUILD=host;ARM;RISCV"
] ++ optionals enableSharedLibraries [ ] ++ optionals enableSharedLibraries [
"-DLLVM_LINK_LLVM_DYLIB=ON" "-DLLVM_LINK_LLVM_DYLIB=ON"
] ++ optionals enableManpages [ ] ++ optionals enableManpages [
@ -137,7 +131,6 @@ in stdenv.mkDerivation (rec {
"-DCAN_TARGET_i386=false" "-DCAN_TARGET_i386=false"
] ++ optionals (stdenv.hostPlatform != stdenv.buildPlatform) [ ] ++ optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
"-DCMAKE_CROSSCOMPILING=True" "-DCMAKE_CROSSCOMPILING=True"
"-DLLVM_TABLEGEN=${llvmPackages_13.tools.llvm}/bin/llvm-tblgen"
( (
let let
nativeCC = pkgsBuildBuild.targetPackages.stdenv.cc; nativeCC = pkgsBuildBuild.targetPackages.stdenv.cc;
@ -154,6 +147,7 @@ in stdenv.mkDerivation (rec {
] ++ extraCmakeFlags; ] ++ extraCmakeFlags;
postBuild = '' postBuild = ''
make llvm-config
rm -fR $out rm -fR $out
''; '';
@ -162,6 +156,7 @@ in stdenv.mkDerivation (rec {
''; '';
postInstall = '' postInstall = ''
cp bin/llvm-config $out/bin
mkdir -p $python/share mkdir -p $python/share
mv $out/share/opt-viewer $python/share/opt-viewer mv $out/share/opt-viewer $python/share/opt-viewer
moveToOutput "bin/llvm-config*" "$dev" moveToOutput "bin/llvm-config*" "$dev"

View File

@ -1,23 +1,10 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt diff --git a/CMakeLists.txt b/CMakeLists.txt
index 135036f509d2..265c36f8211b 100644 index fec956091cd5..5a766f5c5d7c 100644
--- a/CMakeLists.txt --- a/CMakeLists.txt
+++ b/CMakeLists.txt +++ b/CMakeLists.txt
@@ -270,15 +270,21 @@ if (CMAKE_BUILD_TYPE AND @@ -303,6 +303,9 @@ set(LLVM_EXAMPLES_INSTALL_DIR "examples" CACHE STRING
message(FATAL_ERROR "Invalid value for CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}") "Path for examples subdirectory (enabled by LLVM_BUILD_EXAMPLES=ON) (defaults to 'examples')")
endif() mark_as_advanced(LLVM_EXAMPLES_INSTALL_DIR)
+include(GNUInstallDirs)
+
set(LLVM_LIBDIR_SUFFIX "" CACHE STRING "Define suffix of library directory name (32/64)" )
-set(LLVM_TOOLS_INSTALL_DIR "bin" CACHE STRING "Path for binary subdirectory (defaults to 'bin')")
+set(LLVM_TOOLS_INSTALL_DIR "${CMAKE_INSTALL_BINDIR}" CACHE STRING
+ "Path for binary subdirectory (defaults to 'bin')")
mark_as_advanced(LLVM_TOOLS_INSTALL_DIR)
set(LLVM_UTILS_INSTALL_DIR "${LLVM_TOOLS_INSTALL_DIR}" CACHE STRING
"Path to install LLVM utilities (enabled by LLVM_INSTALL_UTILS=ON) (defaults to LLVM_TOOLS_INSTALL_DIR)")
mark_as_advanced(LLVM_UTILS_INSTALL_DIR)
+set(LLVM_INSTALL_CMAKE_DIR "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}/cmake/llvm" CACHE STRING +set(LLVM_INSTALL_CMAKE_DIR "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}/cmake/llvm" CACHE STRING
+ "Path for CMake subdirectory (defaults to lib/cmake/llvm)" ) + "Path for CMake subdirectory (defaults to lib/cmake/llvm)" )
@ -25,70 +12,22 @@ index 135036f509d2..265c36f8211b 100644
# They are used as destination of target generators. # They are used as destination of target generators.
set(LLVM_RUNTIME_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/bin) set(LLVM_RUNTIME_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/bin)
set(LLVM_LIBRARY_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/lib${LLVM_LIBDIR_SUFFIX}) set(LLVM_LIBRARY_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/lib${LLVM_LIBDIR_SUFFIX})
@@ -581,9 +587,9 @@ option (LLVM_ENABLE_SPHINX "Use Sphinx to generate llvm documentation." OFF)
option (LLVM_ENABLE_OCAMLDOC "Build OCaml bindings documentation." ON)
option (LLVM_ENABLE_BINDINGS "Build bindings." ON)
-set(LLVM_INSTALL_DOXYGEN_HTML_DIR "share/doc/llvm/doxygen-html"
+set(LLVM_INSTALL_DOXYGEN_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/doxygen-html"
CACHE STRING "Doxygen-generated HTML documentation install directory")
-set(LLVM_INSTALL_OCAMLDOC_HTML_DIR "share/doc/llvm/ocaml-html"
+set(LLVM_INSTALL_OCAMLDOC_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/ocaml-html"
CACHE STRING "OCamldoc-generated HTML documentation install directory")
option (LLVM_BUILD_EXTERNAL_COMPILER_RT
@@ -1048,7 +1054,7 @@ endif()
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
install(DIRECTORY include/llvm include/llvm-c
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
COMPONENT llvm-headers
FILES_MATCHING
PATTERN "*.def"
@@ -1059,7 +1065,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
)
install(DIRECTORY ${LLVM_INCLUDE_DIR}/llvm ${LLVM_INCLUDE_DIR}/llvm-c
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
COMPONENT llvm-headers
FILES_MATCHING
PATTERN "*.def"
@@ -1073,13 +1079,13 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
if (LLVM_INSTALL_MODULEMAPS)
install(DIRECTORY include/llvm include/llvm-c
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
COMPONENT llvm-headers
FILES_MATCHING
PATTERN "module.modulemap"
)
install(FILES include/llvm/module.install.modulemap
- DESTINATION include/llvm
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm
COMPONENT llvm-headers
RENAME "module.extern.modulemap"
)
diff --git a/cmake/modules/AddLLVM.cmake b/cmake/modules/AddLLVM.cmake diff --git a/cmake/modules/AddLLVM.cmake b/cmake/modules/AddLLVM.cmake
index 9c2b85374307..5531ceeb2eeb 100644 index fed1fec7d72e..4baed19b9e98 100644
--- a/cmake/modules/AddLLVM.cmake --- a/cmake/modules/AddLLVM.cmake
+++ b/cmake/modules/AddLLVM.cmake +++ b/cmake/modules/AddLLVM.cmake
@@ -818,9 +818,9 @@ macro(add_llvm_library name) @@ -838,8 +838,8 @@ macro(add_llvm_library name)
get_target_export_arg(${name} LLVM export_to_llvmexports ${umbrella}) get_target_export_arg(${name} LLVM export_to_llvmexports ${umbrella})
install(TARGETS ${name} install(TARGETS ${name}
${export_to_llvmexports} ${export_to_llvmexports}
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT ${name} - LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT ${name} - ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
- RUNTIME DESTINATION bin COMPONENT ${name}) + LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" COMPONENT ${name}
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX} COMPONENT ${name} + ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" COMPONENT ${name}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX} COMPONENT ${name} RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" COMPONENT ${name})
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT ${name})
if (NOT LLVM_ENABLE_IDE) if (NOT LLVM_ENABLE_IDE)
add_llvm_install_targets(install-${name} @@ -1056,7 +1056,7 @@ function(process_llvm_pass_plugins)
@@ -1036,7 +1036,7 @@ function(process_llvm_pass_plugins)
"set(LLVM_STATIC_EXTENSIONS ${LLVM_STATIC_EXTENSIONS})") "set(LLVM_STATIC_EXTENSIONS ${LLVM_STATIC_EXTENSIONS})")
install(FILES install(FILES
${llvm_cmake_builddir}/LLVMConfigExtensions.cmake ${llvm_cmake_builddir}/LLVMConfigExtensions.cmake
@ -97,16 +36,7 @@ index 9c2b85374307..5531ceeb2eeb 100644
COMPONENT cmake-exports) COMPONENT cmake-exports)
set(ExtensionDef "${LLVM_BINARY_DIR}/include/llvm/Support/Extension.def") set(ExtensionDef "${LLVM_BINARY_DIR}/include/llvm/Support/Extension.def")
@@ -1250,7 +1250,7 @@ macro(add_llvm_example name) @@ -1902,7 +1902,7 @@ function(llvm_install_library_symlink name dest type)
endif()
add_llvm_executable(${name} ${ARGN})
if( LLVM_BUILD_EXAMPLES )
- install(TARGETS ${name} RUNTIME DESTINATION examples)
+ install(TARGETS ${name} RUNTIME DESTINATION ${CMAKE_INSTALL_DOCDIR}/examples)
endif()
set_target_properties(${name} PROPERTIES FOLDER "Examples")
endmacro(add_llvm_example name)
@@ -1868,7 +1868,7 @@ function(llvm_install_library_symlink name dest type)
set(full_name ${CMAKE_${type}_LIBRARY_PREFIX}${name}${CMAKE_${type}_LIBRARY_SUFFIX}) set(full_name ${CMAKE_${type}_LIBRARY_PREFIX}${name}${CMAKE_${type}_LIBRARY_SUFFIX})
set(full_dest ${CMAKE_${type}_LIBRARY_PREFIX}${dest}${CMAKE_${type}_LIBRARY_SUFFIX}) set(full_dest ${CMAKE_${type}_LIBRARY_PREFIX}${dest}${CMAKE_${type}_LIBRARY_SUFFIX})
@ -115,7 +45,7 @@ index 9c2b85374307..5531ceeb2eeb 100644
if(WIN32 AND "${type}" STREQUAL "SHARED") if(WIN32 AND "${type}" STREQUAL "SHARED")
set(output_dir bin) set(output_dir bin)
endif() endif()
@@ -1879,7 +1879,7 @@ function(llvm_install_library_symlink name dest type) @@ -1913,7 +1913,7 @@ function(llvm_install_library_symlink name dest type)
endfunction() endfunction()
@ -124,7 +54,7 @@ index 9c2b85374307..5531ceeb2eeb 100644
cmake_parse_arguments(ARG "ALWAYS_GENERATE" "COMPONENT" "" ${ARGN}) cmake_parse_arguments(ARG "ALWAYS_GENERATE" "COMPONENT" "" ${ARGN})
foreach(path ${CMAKE_MODULE_PATH}) foreach(path ${CMAKE_MODULE_PATH})
if(EXISTS ${path}/LLVMInstallSymlink.cmake) if(EXISTS ${path}/LLVMInstallSymlink.cmake)
@@ -1902,7 +1902,7 @@ function(llvm_install_symlink name dest) @@ -1936,7 +1936,7 @@ function(llvm_install_symlink name dest)
set(full_dest ${dest}${CMAKE_EXECUTABLE_SUFFIX}) set(full_dest ${dest}${CMAKE_EXECUTABLE_SUFFIX})
install(SCRIPT ${INSTALL_SYMLINK} install(SCRIPT ${INSTALL_SYMLINK}
@ -133,7 +63,7 @@ index 9c2b85374307..5531ceeb2eeb 100644
COMPONENT ${component}) COMPONENT ${component})
if (NOT LLVM_ENABLE_IDE AND NOT ARG_ALWAYS_GENERATE) if (NOT LLVM_ENABLE_IDE AND NOT ARG_ALWAYS_GENERATE)
@@ -1985,7 +1985,8 @@ function(add_llvm_tool_symlink link_name target) @@ -2019,7 +2019,8 @@ function(add_llvm_tool_symlink link_name target)
endif() endif()
if ((TOOL_IS_TOOLCHAIN OR NOT LLVM_INSTALL_TOOLCHAIN_ONLY) AND LLVM_BUILD_TOOLS) if ((TOOL_IS_TOOLCHAIN OR NOT LLVM_INSTALL_TOOLCHAIN_ONLY) AND LLVM_BUILD_TOOLS)
@ -143,7 +73,7 @@ index 9c2b85374307..5531ceeb2eeb 100644
endif() endif()
endif() endif()
endfunction() endfunction()
@@ -2114,9 +2115,9 @@ function(llvm_setup_rpath name) @@ -2148,9 +2149,9 @@ function(llvm_setup_rpath name)
# Since BUILD_SHARED_LIBS is only recommended for use by developers, # Since BUILD_SHARED_LIBS is only recommended for use by developers,
# hardcode the rpath to build/install lib dir first in this mode. # hardcode the rpath to build/install lib dir first in this mode.
# FIXME: update this when there is better solution. # FIXME: update this when there is better solution.
@ -156,10 +86,10 @@ index 9c2b85374307..5531ceeb2eeb 100644
set_property(TARGET ${name} APPEND_STRING PROPERTY set_property(TARGET ${name} APPEND_STRING PROPERTY
LINK_FLAGS " -Wl,-z,origin ") LINK_FLAGS " -Wl,-z,origin ")
diff --git a/cmake/modules/AddOCaml.cmake b/cmake/modules/AddOCaml.cmake diff --git a/cmake/modules/AddOCaml.cmake b/cmake/modules/AddOCaml.cmake
index 554046b20edf..4d1ad980641e 100644 index 891c9e6d618c..8d963f3b0069 100644
--- a/cmake/modules/AddOCaml.cmake --- a/cmake/modules/AddOCaml.cmake
+++ b/cmake/modules/AddOCaml.cmake +++ b/cmake/modules/AddOCaml.cmake
@@ -144,9 +144,9 @@ function(add_ocaml_library name) @@ -147,9 +147,9 @@ function(add_ocaml_library name)
endforeach() endforeach()
if( APPLE ) if( APPLE )
@ -171,86 +101,55 @@ index 554046b20edf..4d1ad980641e 100644
endif() endif()
list(APPEND ocaml_flags "-ldopt" "-Wl,-rpath,${ocaml_rpath}") list(APPEND ocaml_flags "-ldopt" "-Wl,-rpath,${ocaml_rpath}")
diff --git a/cmake/modules/AddSphinxTarget.cmake b/cmake/modules/AddSphinxTarget.cmake
index e80c3b5c1cac..482f6d715ef5 100644
--- a/cmake/modules/AddSphinxTarget.cmake
+++ b/cmake/modules/AddSphinxTarget.cmake
@@ -90,7 +90,7 @@ function (add_sphinx_target builder project)
endif()
elseif (builder STREQUAL html)
string(TOUPPER "${project}" project_upper)
- set(${project_upper}_INSTALL_SPHINX_HTML_DIR "share/doc/${project}/html"
+ set(${project_upper}_INSTALL_SPHINX_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/html"
CACHE STRING "HTML documentation install directory for ${project}")
# '/.' indicates: copy the contents of the directory directly into
diff --git a/cmake/modules/CMakeLists.txt b/cmake/modules/CMakeLists.txt diff --git a/cmake/modules/CMakeLists.txt b/cmake/modules/CMakeLists.txt
index 51b6a4fdc284..4adc2acfc074 100644 index cea0c1df0a14..eedcd9450312 100644
--- a/cmake/modules/CMakeLists.txt --- a/cmake/modules/CMakeLists.txt
+++ b/cmake/modules/CMakeLists.txt +++ b/cmake/modules/CMakeLists.txt
@@ -1,6 +1,6 @@ @@ -2,7 +2,7 @@ include(ExtendPath)
include(LLVMDistributionSupport) include(LLVMDistributionSupport)
include(FindPrefixFromConfig)
-set(LLVM_INSTALL_PACKAGE_DIR lib${LLVM_LIBDIR_SUFFIX}/cmake/llvm) -set(LLVM_INSTALL_PACKAGE_DIR lib${LLVM_LIBDIR_SUFFIX}/cmake/llvm)
+set(LLVM_INSTALL_PACKAGE_DIR ${LLVM_INSTALL_CMAKE_DIR} CACHE STRING "Path for CMake subdirectory (defaults to 'cmake/llvm')") +set(LLVM_INSTALL_PACKAGE_DIR ${LLVM_INSTALL_CMAKE_DIR} CACHE STRING "Path for CMake subdirectory (defaults to 'cmake/llvm')")
set(llvm_cmake_builddir "${LLVM_BINARY_DIR}/${LLVM_INSTALL_PACKAGE_DIR}") set(llvm_cmake_builddir "${LLVM_BINARY_DIR}/${LLVM_INSTALL_PACKAGE_DIR}")
# First for users who use an installed LLVM, create the LLVMExports.cmake file. # First for users who use an installed LLVM, create the LLVMExports.cmake file.
@@ -109,13 +109,13 @@ foreach(p ${_count}) @@ -122,7 +122,7 @@ set(LLVM_CONFIG_INCLUDE_DIRS
set(LLVM_CONFIG_CODE "${LLVM_CONFIG_CODE} )
get_filename_component(LLVM_INSTALL_PREFIX \"\${LLVM_INSTALL_PREFIX}\" PATH)") list(REMOVE_DUPLICATES LLVM_CONFIG_INCLUDE_DIRS)
endforeach(p)
-set(LLVM_CONFIG_INCLUDE_DIRS "\${LLVM_INSTALL_PREFIX}/include")
+set(LLVM_CONFIG_INCLUDE_DIRS "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}")
set(LLVM_CONFIG_INCLUDE_DIR "${LLVM_CONFIG_INCLUDE_DIRS}")
set(LLVM_CONFIG_MAIN_INCLUDE_DIR "${LLVM_CONFIG_INCLUDE_DIRS}")
-set(LLVM_CONFIG_LIBRARY_DIRS "\${LLVM_INSTALL_PREFIX}/lib\${LLVM_LIBDIR_SUFFIX}")
+set(LLVM_CONFIG_LIBRARY_DIRS "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}\${LLVM_LIBDIR_SUFFIX}")
set(LLVM_CONFIG_CMAKE_DIR "\${LLVM_INSTALL_PREFIX}/${LLVM_INSTALL_PACKAGE_DIR}")
set(LLVM_CONFIG_BINARY_DIR "\${LLVM_INSTALL_PREFIX}")
-set(LLVM_CONFIG_TOOLS_BINARY_DIR "\${LLVM_INSTALL_PREFIX}/bin")
+set(LLVM_CONFIG_TOOLS_BINARY_DIR "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}")
# Generate a default location for lit -extend_path(LLVM_CONFIG_LIBRARY_DIR "\${LLVM_INSTALL_PREFIX}" "lib\${LLVM_LIBDIR_SUFFIX}")
if (LLVM_INSTALL_UTILS AND LLVM_BUILD_UTILS) +extend_path(LLVM_CONFIG_LIBRARY_DIR "\${LLVM_INSTALL_PREFIX}" "${CMAKE_INSTALL_LIBDIR}\${LLVM_LIBDIR_SUFFIX}")
set(LLVM_CONFIG_LIBRARY_DIRS
"${LLVM_CONFIG_LIBRARY_DIR}"
# FIXME: Should there be other entries here?
diff --git a/cmake/modules/LLVMInstallSymlink.cmake b/cmake/modules/LLVMInstallSymlink.cmake diff --git a/cmake/modules/LLVMInstallSymlink.cmake b/cmake/modules/LLVMInstallSymlink.cmake
index 3e6a2c9a2648..52e14d955c60 100644 index b5c35f706cb7..9261ab797de6 100644
--- a/cmake/modules/LLVMInstallSymlink.cmake --- a/cmake/modules/LLVMInstallSymlink.cmake
+++ b/cmake/modules/LLVMInstallSymlink.cmake +++ b/cmake/modules/LLVMInstallSymlink.cmake
@@ -4,7 +4,7 @@ @@ -6,7 +6,7 @@ include(GNUInstallDirs)
function(install_symlink name target outdir) function(install_symlink name target outdir)
set(DESTDIR $ENV{DESTDIR}) set(DESTDIR $ENV{DESTDIR})
- set(bindir "${DESTDIR}${CMAKE_INSTALL_PREFIX}/${outdir}/") - set(bindir "${DESTDIR}${CMAKE_INSTALL_PREFIX}/${outdir}")
+ set(bindir "${DESTDIR}${outdir}/") + set(bindir "${DESTDIR}${outdir}/")
message(STATUS "Creating ${name}") message(STATUS "Creating ${name}")
diff --git a/docs/CMake.rst b/docs/CMake.rst diff --git a/docs/CMake.rst b/docs/CMake.rst
index f1ac2c7d4934..c6e1469b5e13 100644 index 044ec8a4d39d..504d0eac3ade 100644
--- a/docs/CMake.rst --- a/docs/CMake.rst
+++ b/docs/CMake.rst +++ b/docs/CMake.rst
@@ -202,7 +202,7 @@ CMake manual, or execute ``cmake --help-variable VARIABLE_NAME``. @@ -224,7 +224,7 @@ description is in `LLVM-related variables`_ below.
**LLVM_LIBDIR_SUFFIX**:STRING **LLVM_LIBDIR_SUFFIX**:STRING
Extra suffix to append to the directory where libraries are to be Extra suffix to append to the directory where libraries are to be
installed. On a 64-bit architecture, one could use ``-DLLVM_LIBDIR_SUFFIX=64`` installed. On a 64-bit architecture, one could use ``-DLLVM_LIBDIR_SUFFIX=64``
- to install libraries to ``/usr/lib64``. - to install libraries to ``/usr/lib64``.
+ to install libraries to ``/usr/lib64``. See also ``CMAKE_INSTALL_LIBDIR``. + to install libraries to ``/usr/lib64``. See also ``CMAKE_INSTALL_LIBDIR``.
Rarely-used CMake variables **LLVM_PARALLEL_{COMPILE,LINK}_JOBS**:STRING
--------------------------- Building the llvm toolchain can use a lot of resources, particularly
@@ -551,8 +551,8 @@ LLVM-specific variables @@ -910,9 +910,11 @@ the ``cmake`` command or by setting it directly in ``ccmake`` or ``cmake-gui``).
**LLVM_INSTALL_DOXYGEN_HTML_DIR**:STRING
The path to install Doxygen-generated HTML documentation to. This path can
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
- `share/doc/llvm/doxygen-html`.
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
+ `${CMAKE_INSTALL_DOCDIR}/${project}/doxygen-html`.
**LLVM_LINK_LLVM_DYLIB**:BOOL
If enabled, tools will be linked with the libLLVM shared library. Defaults
@@ -792,9 +792,11 @@ the ``cmake`` command or by setting it directly in ``ccmake`` or ``cmake-gui``).
This file is available in two different locations. This file is available in two different locations.
@ -265,18 +164,6 @@ index f1ac2c7d4934..c6e1469b5e13 100644
* ``<LLVM_BUILD_ROOT>/lib/cmake/llvm/LLVMConfig.cmake`` where * ``<LLVM_BUILD_ROOT>/lib/cmake/llvm/LLVMConfig.cmake`` where
``<LLVM_BUILD_ROOT>`` is the root of the LLVM build tree. **Note: this is only ``<LLVM_BUILD_ROOT>`` is the root of the LLVM build tree. **Note: this is only
diff --git a/examples/Bye/CMakeLists.txt b/examples/Bye/CMakeLists.txt
index bb96edb4b4bf..678c22fb43c8 100644
--- a/examples/Bye/CMakeLists.txt
+++ b/examples/Bye/CMakeLists.txt
@@ -14,6 +14,6 @@ if (NOT WIN32)
BUILDTREE_ONLY
)
- install(TARGETS ${name} RUNTIME DESTINATION examples)
+ install(TARGETS ${name} RUNTIME DESTINATION ${CMAKE_INSTALL_DOCDIR}/examples)
set_target_properties(${name} PROPERTIES FOLDER "Examples")
endif()
diff --git a/include/llvm/CMakeLists.txt b/include/llvm/CMakeLists.txt diff --git a/include/llvm/CMakeLists.txt b/include/llvm/CMakeLists.txt
index b46319f24fc8..2feabd1954e4 100644 index b46319f24fc8..2feabd1954e4 100644
--- a/include/llvm/CMakeLists.txt --- a/include/llvm/CMakeLists.txt
@ -289,93 +176,45 @@ index b46319f24fc8..2feabd1954e4 100644
+ configure_file(module.modulemap.build ${LLVM_INCLUDE_DIR}/module.modulemap COPYONLY) + configure_file(module.modulemap.build ${LLVM_INCLUDE_DIR}/module.modulemap COPYONLY)
endif (NOT "${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}") endif (NOT "${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
diff --git a/tools/llvm-config/BuildVariables.inc.in b/tools/llvm-config/BuildVariables.inc.in diff --git a/tools/llvm-config/BuildVariables.inc.in b/tools/llvm-config/BuildVariables.inc.in
index ebe5b73a5c65..70c497be12f5 100644 index abbb8a450da6..70c497be12f5 100644
--- a/tools/llvm-config/BuildVariables.inc.in --- a/tools/llvm-config/BuildVariables.inc.in
+++ b/tools/llvm-config/BuildVariables.inc.in +++ b/tools/llvm-config/BuildVariables.inc.in
@@ -23,6 +23,10 @@ @@ -23,7 +23,10 @@
#define LLVM_CXXFLAGS "@LLVM_CXXFLAGS@" #define LLVM_CXXFLAGS "@LLVM_CXXFLAGS@"
#define LLVM_BUILDMODE "@LLVM_BUILDMODE@" #define LLVM_BUILDMODE "@LLVM_BUILDMODE@"
#define LLVM_LIBDIR_SUFFIX "@LLVM_LIBDIR_SUFFIX@" #define LLVM_LIBDIR_SUFFIX "@LLVM_LIBDIR_SUFFIX@"
+#define LLVM_INSTALL_BINDIR "@CMAKE_INSTALL_BINDIR@" +#define LLVM_INSTALL_BINDIR "@CMAKE_INSTALL_BINDIR@"
+#define LLVM_INSTALL_LIBDIR "@CMAKE_INSTALL_LIBDIR@" +#define LLVM_INSTALL_LIBDIR "@CMAKE_INSTALL_LIBDIR@"
+#define LLVM_INSTALL_INCLUDEDIR "@CMAKE_INSTALL_INCLUDEDIR@" #define LLVM_INSTALL_INCLUDEDIR "@CMAKE_INSTALL_INCLUDEDIR@"
+#define LLVM_INSTALL_CMAKEDIR "@LLVM_INSTALL_CMAKE_DIR@" +#define LLVM_INSTALL_CMAKEDIR "@LLVM_INSTALL_CMAKE_DIR@"
#define LLVM_TARGETS_BUILT "@LLVM_TARGETS_BUILT@" #define LLVM_TARGETS_BUILT "@LLVM_TARGETS_BUILT@"
#define LLVM_SYSTEM_LIBS "@LLVM_SYSTEM_LIBS@" #define LLVM_SYSTEM_LIBS "@LLVM_SYSTEM_LIBS@"
#define LLVM_BUILD_SYSTEM "@LLVM_BUILD_SYSTEM@" #define LLVM_BUILD_SYSTEM "@LLVM_BUILD_SYSTEM@"
diff --git a/tools/llvm-config/llvm-config.cpp b/tools/llvm-config/llvm-config.cpp diff --git a/tools/llvm-config/llvm-config.cpp b/tools/llvm-config/llvm-config.cpp
index 1a2f04552d13..44fa7d3eec6b 100644 index 8ed88f33ead4..5e7184bab90d 100644
--- a/tools/llvm-config/llvm-config.cpp --- a/tools/llvm-config/llvm-config.cpp
+++ b/tools/llvm-config/llvm-config.cpp +++ b/tools/llvm-config/llvm-config.cpp
@@ -357,12 +357,26 @@ int main(int argc, char **argv) { @@ -363,12 +363,20 @@ int main(int argc, char **argv) {
("-I" + ActiveIncludeDir + " " + "-I" + ActiveObjRoot + "/include"); ActiveIncludeDir = std::string(Path.str());
} else { }
ActivePrefix = CurrentExecPrefix; {
- ActiveIncludeDir = ActivePrefix + "/include"; - SmallString<256> Path(LLVM_TOOLS_INSTALL_DIR);
- SmallString<256> path(StringRef(LLVM_TOOLS_INSTALL_DIR)); + SmallString<256> Path(LLVM_INSTALL_BINDIR);
- sys::fs::make_absolute(ActivePrefix, path); sys::fs::make_absolute(ActivePrefix, Path);
- ActiveBinDir = std::string(path.str()); ActiveBinDir = std::string(Path.str());
}
- ActiveLibDir = ActivePrefix + "/lib" + LLVM_LIBDIR_SUFFIX; - ActiveLibDir = ActivePrefix + "/lib" + LLVM_LIBDIR_SUFFIX;
- ActiveCMakeDir = ActiveLibDir + "/cmake/llvm"; - ActiveCMakeDir = ActiveLibDir + "/cmake/llvm";
+ { + {
+ SmallString<256> path(StringRef(LLVM_INSTALL_INCLUDEDIR)); + SmallString<256> Path(LLVM_INSTALL_LIBDIR LLVM_LIBDIR_SUFFIX);
+ sys::fs::make_absolute(ActivePrefix, path); + sys::fs::make_absolute(ActivePrefix, Path);
+ ActiveIncludeDir = std::string(path.str()); + ActiveLibDir = std::string(Path.str());
+ } + }
+ { + {
+ SmallString<256> path(StringRef(LLVM_INSTALL_BINDIR)); + SmallString<256> Path(LLVM_INSTALL_CMAKEDIR);
+ sys::fs::make_absolute(ActivePrefix, path); + sys::fs::make_absolute(ActivePrefix, Path);
+ ActiveBinDir = std::string(path.str()); + ActiveCMakeDir = std::string(Path.str());
+ }
+ {
+ SmallString<256> path(StringRef(LLVM_INSTALL_LIBDIR LLVM_LIBDIR_SUFFIX));
+ sys::fs::make_absolute(ActivePrefix, path);
+ ActiveLibDir = std::string(path.str());
+ }
+ {
+ SmallString<256> path(StringRef(LLVM_INSTALL_CMAKEDIR));
+ sys::fs::make_absolute(ActivePrefix, path);
+ ActiveCMakeDir = std::string(path.str());
+ } + }
ActiveIncludeOption = "-I" + ActiveIncludeDir; ActiveIncludeOption = "-I" + ActiveIncludeDir;
} }
diff --git a/tools/lto/CMakeLists.txt b/tools/lto/CMakeLists.txt
index 0af29ad762c5..37b99b83e35c 100644
--- a/tools/lto/CMakeLists.txt
+++ b/tools/lto/CMakeLists.txt
@@ -33,7 +33,7 @@ add_llvm_library(${LTO_LIBRARY_NAME} ${LTO_LIBRARY_TYPE} INSTALL_WITH_TOOLCHAIN
${SOURCES} DEPENDS intrinsics_gen)
install(FILES ${LLVM_MAIN_INCLUDE_DIR}/llvm-c/lto.h
- DESTINATION include/llvm-c
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm-c
COMPONENT LTO)
if (APPLE)
diff --git a/tools/opt-viewer/CMakeLists.txt b/tools/opt-viewer/CMakeLists.txt
index ead73ec13a8f..250362021f17 100644
--- a/tools/opt-viewer/CMakeLists.txt
+++ b/tools/opt-viewer/CMakeLists.txt
@@ -8,7 +8,7 @@ set (files
foreach (file ${files})
install(PROGRAMS ${file}
- DESTINATION share/opt-viewer
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/opt-viewer
COMPONENT opt-viewer)
endforeach (file)
diff --git a/tools/remarks-shlib/CMakeLists.txt b/tools/remarks-shlib/CMakeLists.txt
index 865436247270..ce1daa62f6ab 100644
--- a/tools/remarks-shlib/CMakeLists.txt
+++ b/tools/remarks-shlib/CMakeLists.txt
@@ -19,7 +19,7 @@ if(LLVM_ENABLE_PIC)
endif()
install(FILES ${LLVM_MAIN_INCLUDE_DIR}/llvm-c/Remarks.h
- DESTINATION include/llvm-c
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm-c
COMPONENT Remarks)
if (APPLE)

View File

@ -34,14 +34,14 @@ let
in rec { in rec {
llvm-nac3 = pkgs.stdenvNoCC.mkDerivation rec { llvm-nac3 = pkgs.stdenvNoCC.mkDerivation rec {
pname = "llvm-nac3-msys2"; pname = "llvm-nac3-msys2";
version = "13.0.1"; version = "14.0.1";
src-llvm = pkgs.fetchurl { src-llvm = pkgs.fetchurl {
url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/llvm-${version}.src.tar.xz"; url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/llvm-${version}.src.tar.xz";
sha256 = "sha256-7GuA2Cw4SsrS3BkpA6bPLNuv+4ibhL+5janXHmMPyDQ="; sha256 = "sha256-W4kBfewnKTEasUNALwPaHeptDHndXHAbyTnPizTwHsI=";
}; };
src-clang = pkgs.fetchurl { src-clang = pkgs.fetchurl {
url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/clang-${version}.src.tar.xz"; url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/clang-${version}.src.tar.xz";
sha256 = "sha256-eHqeLZn1yHIKoXc+S+AJRhzTDTvUD90kWR5HNGfJF8k="; sha256 = "sha256-hE+O1cVEOPxTPQoW4KrPdfhLmKWaU9CEhT0tvsL9kqE=";
}; };
buildInputs = [ pkgs.wineWowPackages.stable ]; buildInputs = [ pkgs.wineWowPackages.stable ];
phases = [ "unpackPhase" "patchPhase" "configurePhase" "buildPhase" "installPhase" ]; phases = [ "unpackPhase" "patchPhase" "configurePhase" "buildPhase" "installPhase" ];
@ -49,6 +49,7 @@ in rec {
'' ''
mkdir llvm mkdir llvm
tar xf ${src-llvm} -C llvm --strip-components=1 tar xf ${src-llvm} -C llvm --strip-components=1
mv llvm/Modules/* llvm/cmake/modules # work around https://github.com/llvm/llvm-project/issues/53281
mkdir clang mkdir clang
tar xf ${src-clang} -C clang --strip-components=1 tar xf ${src-clang} -C clang --strip-components=1
cd llvm cd llvm
@ -64,7 +65,7 @@ in rec {
${silenceFontconfig} ${silenceFontconfig}
mkdir build mkdir build
cd build cd build
wine64 cmake .. -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_UNWIND_TABLES=OFF -DLLVM_ENABLE_THREADS=OFF -DLLVM_TARGETS_TO_BUILD=X86\;ARM\;RISCV -DLLVM_LINK_LLVM_DYLIB=OFF -DLLVM_ENABLE_FFI=OFF -DFFI_INCLUDE_DIR=fck-cmake -DFFI_LIBRARY_DIR=fck-cmake -DLLVM_ENABLE_LIBXML2=OFF -DLLVM_ENABLE_PROJECTS=clang -DCMAKE_INSTALL_PREFIX=Z:$out wine64 cmake .. -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_UNWIND_TABLES=OFF -DLLVM_ENABLE_THREADS=OFF -DLLVM_TARGETS_TO_BUILD=X86\;ARM\;RISCV -DLLVM_LINK_LLVM_DYLIB=OFF -DLLVM_ENABLE_FFI=OFF -DFFI_INCLUDE_DIR=fck-cmake -DFFI_LIBRARY_DIR=fck-cmake -DLLVM_ENABLE_LIBXML2=OFF -DLLVM_INCLUDE_BENCHMARKS=OFF -DLLVM_ENABLE_PROJECTS=clang -DCMAKE_INSTALL_PREFIX=Z:$out
''; '';
buildPhase = buildPhase =
'' ''
@ -79,7 +80,12 @@ in rec {
nac3artiq = pkgs.rustPlatform.buildRustPackage { nac3artiq = pkgs.rustPlatform.buildRustPackage {
name = "nac3artiq-msys2"; name = "nac3artiq-msys2";
src = ../../.; src = ../../.;
cargoLock = { lockFile = ../../Cargo.lock; }; cargoLock = {
lockFile = ../../Cargo.lock;
outputHashes = {
"inkwell-0.1.0" = "sha256-THGKoTqQCSusxMukOiksQ9pCnxdIBUO6MH3fiwQjYVA=";
};
};
nativeBuildInputs = [ pkgs.wineWowPackages.stable ]; nativeBuildInputs = [ pkgs.wineWowPackages.stable ];
buildPhase = buildPhase =
'' ''
@ -122,14 +128,19 @@ in rec {
}; };
lld = pkgs.stdenvNoCC.mkDerivation rec { lld = pkgs.stdenvNoCC.mkDerivation rec {
pname = "lld-msys2"; pname = "lld-msys2";
version = "13.0.1"; version = "14.0.1";
src = pkgs.fetchurl { src = pkgs.fetchurl {
url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/lld-${version}.src.tar.xz"; url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/lld-${version}.src.tar.xz";
sha256 = "sha256-Zmr3Rei/e2gFM7TRi3ox3HyrV1sebk0mGSK7r9lkTPs="; sha256 = "sha256-MbrFSILSfJ4hfqRFA0BGrAUkLlyOSyxQ8/mAL8ijeBo=";
}; };
buildInputs = [ pkgs.wineWowPackages.stable ]; buildInputs = [ pkgs.wineWowPackages.stable ];
phases = [ "unpackPhase" "patchPhase" "configurePhase" "buildPhase" "installPhase" ]; phases = [ "unpackPhase" "patchPhase" "configurePhase" "buildPhase" "installPhase" ];
patches = [ ./lld-disable-macho.diff ]; patches = [ ./lld-disable-macho.diff ];
setSourceRoot = # work around https://github.com/llvm/llvm-project/issues/53281
''
mv cmake/Modules/* lld-14.0.1.src/cmake/modules
sourceRoot=lld-14.0.1.src
'';
configurePhase = configurePhase =
'' ''
export HOME=`mktemp -d` export HOME=`mktemp -d`

View File

@ -1,7 +1,7 @@
diff '--color=auto' -Naur lld-13.0.1.src/CMakeLists.txt lld-13.0.1.src-new/CMakeLists.txt diff '--color=auto' -Naur lld-14.0.1.src.orig/CMakeLists.txt lld-14.0.1.src/CMakeLists.txt
--- lld-13.0.1.src/CMakeLists.txt 2022-01-21 05:31:59.000000000 +0800 --- lld-14.0.1.src.orig/CMakeLists.txt 2022-04-12 07:44:22.000000000 +0800
+++ lld-13.0.1.src-new/CMakeLists.txt 2022-03-27 18:26:30.284921982 +0800 +++ lld-14.0.1.src/CMakeLists.txt 2022-04-18 18:03:22.530746411 +0800
@@ -206,7 +206,6 @@ @@ -248,7 +248,6 @@
add_subdirectory(docs) add_subdirectory(docs)
add_subdirectory(COFF) add_subdirectory(COFF)
add_subdirectory(ELF) add_subdirectory(ELF)
@ -9,28 +9,26 @@ diff '--color=auto' -Naur lld-13.0.1.src/CMakeLists.txt lld-13.0.1.src-new/CMake
add_subdirectory(MinGW) add_subdirectory(MinGW)
add_subdirectory(wasm) add_subdirectory(wasm)
diff '--color=auto' -Naur lld-13.0.1.src/tools/lld/CMakeLists.txt lld-13.0.1.src-new/tools/lld/CMakeLists.txt diff '--color=auto' -Naur lld-14.0.1.src.orig/tools/lld/CMakeLists.txt lld-14.0.1.src/tools/lld/CMakeLists.txt
--- lld-13.0.1.src/tools/lld/CMakeLists.txt 2022-01-21 05:31:59.000000000 +0800 --- lld-14.0.1.src.orig/tools/lld/CMakeLists.txt 2022-04-12 07:44:22.000000000 +0800
+++ lld-13.0.1.src-new/tools/lld/CMakeLists.txt 2022-03-27 18:26:40.805046295 +0800 +++ lld-14.0.1.src/tools/lld/CMakeLists.txt 2022-04-18 18:03:34.434692221 +0800
@@ -15,7 +15,6 @@ @@ -14,7 +14,6 @@
lldCommon
lldCOFF lldCOFF
lldDriver
lldELF lldELF
- lldMachO2 - lldMachO
lldMinGW lldMinGW
lldWasm lldWasm
) )
diff '--color=auto' -Naur lld-13.0.1.src/tools/lld/lld.cpp lld-13.0.1.src-new/tools/lld/lld.cpp diff '--color=auto' -Naur lld-14.0.1.src.orig/tools/lld/lld.cpp lld-14.0.1.src/tools/lld/lld.cpp
--- lld-13.0.1.src/tools/lld/lld.cpp 2022-01-21 05:31:59.000000000 +0800 --- lld-14.0.1.src.orig/tools/lld/lld.cpp 2022-04-12 07:44:22.000000000 +0800
+++ lld-13.0.1.src-new/tools/lld/lld.cpp 2022-03-27 08:43:54.205524156 +0800 +++ lld-14.0.1.src/tools/lld/lld.cpp 2022-04-18 18:04:08.517537288 +0800
@@ -148,10 +148,6 @@ @@ -151,8 +151,6 @@
return !elf::link(args, exitEarly, stdoutOS, stderrOS); return elf::link;
case WinLink: else if (f == WinLink)
return !coff::link(args, exitEarly, stdoutOS, stderrOS); return coff::link;
- case Darwin: - else if (f == Darwin)
- return !macho::link(args, exitEarly, stdoutOS, stderrOS); - return macho::link;
- case DarwinOld: else if (f == Wasm)
- return !mach_o::link(args, exitEarly, stdoutOS, stderrOS); return lld::wasm::link;
case Wasm: else
return !lld::wasm::link(args, exitEarly, stdoutOS, stderrOS);
default:

View File

@ -47,12 +47,11 @@ pub extern "C" fn __nac3_personality(_state: u32, _exception_object: u32, _conte
unimplemented!(); unimplemented!();
} }
fn main() { fn main() {
let filename = env::args().nth(1).unwrap(); let filename = env::args().nth(1).unwrap();
unsafe { unsafe {
let lib = libloading::Library::new(filename).unwrap(); let lib = libloading::Library::new(filename).unwrap();
let func: libloading::Symbol<unsafe extern fn()> = lib.get(b"__modinit__").unwrap(); let func: libloading::Symbol<unsafe extern "C" fn()> = lib.get(b"__modinit__").unwrap();
func() func()
} }
} }