Error Messages Include File Name (#70) and Minor Fix #154

Closed
ychenfo wants to merge 19 commits from ast_filename_location into master
59 changed files with 1256 additions and 600 deletions

Cargo.lock generated
View File

@ -4,9 +4,9 @@ version = 3
[[package]]
name = "ahash"
version = "0.7.4"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43bb833f0bf979d8475d38fbf09ed3b8a55e1885fe93ad3f93239fc6a4f17b98"
checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
dependencies = [
"getrandom",
"once_cell",
@ -77,9 +77,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "cc"
version = "1.0.70"
version = "1.0.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d26a6ce4b6a484fa3edb70f7efa6fc430fd2b87285fe8b84304fd0936faa0dc0"
checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee"
[[package]]
name = "cfg-if"
@ -201,12 +201,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "dtoa"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0"
[[package]]
name = "either"
version = "1.6.1"
@ -316,8 +310,9 @@ dependencies = [
[[package]]
name = "inkwell"
version = "0.1.0"
source = "git+https://github.com/TheDan64/inkwell?branch=master#d018ee22e4b5241dec2bc32ca67f3d4caaecee47"
version = "0.1.0-beta.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2223d0eba0ae6d40a3e4680c6a3209143471e1f38b41746ea309aa36dde9f90b"
dependencies = [
"either",
"inkwell_internals",
@ -330,8 +325,9 @@ dependencies = [
[[package]]
name = "inkwell_internals"
version = "0.3.0"
source = "git+https://github.com/TheDan64/inkwell?branch=master#d018ee22e4b5241dec2bc32ca67f3d4caaecee47"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c7090af3d300424caa81976b8c97bca41cd70e861272c072e188ae082fb49f9"
dependencies = [
"proc-macro2",
"quote",
@ -340,9 +336,9 @@ dependencies = [
[[package]]
name = "insta"
version = "1.8.0"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15226a375927344c78d39dc6b49e2d5562a5b0705e26a589093c6792e52eed8e"
checksum = "86c4e56d571b4cc829f0ce71506bd865a90369eeab5f3d3657ba96230beb8012"
dependencies = [
"console",
"lazy_static",
@ -355,27 +351,27 @@ dependencies = [
[[package]]
name = "instant"
version = "0.1.10"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bee0328b1209d157ef001c94dd85b4f8f64139adb0eac2659f4b08382b2f474d"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if",
]
[[package]]
name = "itertools"
version = "0.10.1"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf"
checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "0.4.8"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
[[package]]
name = "lalrpop"
@ -417,15 +413,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.102"
version = "0.2.112"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2a5ac8f984bfcf3a823267e5fde638acc3325f6496633a5da6bb6eb2171e103"
checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125"
[[package]]
name = "libloading"
version = "0.7.1"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0cf036d15402bea3c5d4de17b3fce76b3e4a56ebc1f577be0e7a72f7c607cf0"
checksum = "afe203d669ec979b7128619bae5a63b7b42e9203c1b29146079ee05e2f604b52"
dependencies = [
"cfg-if",
"winapi",
@ -439,9 +435,9 @@ checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
[[package]]
name = "llvm-sys"
version = "120.2.1"
version = "130.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4a810627ac62b396f5fd2214ba9bbd8748d4d6efdc4d2c1c1303ea7a75763ce"
checksum = "183612ff1acd400cd4faeb1cbf7cc725a868a46282e5c7b112ec5f0a5a49fae7"
dependencies = [
"cc",
"lazy_static",
@ -476,9 +472,9 @@ checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
[[package]]
name = "memoffset"
version = "0.6.4"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9"
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
dependencies = [
"autocfg",
]
@ -501,7 +497,6 @@ version = "0.1.0"
dependencies = [
"fxhash",
"lazy_static",
"num-bigint 0.4.2",
"parking_lot",
"string-interner",
]
@ -516,8 +511,6 @@ dependencies = [
"insta",
"itertools",
"nac3parser",
"num-bigint 0.3.3",
"num-traits",
"parking_lot",
"rayon",
"test-case",
@ -533,8 +526,6 @@ dependencies = [
"lalrpop-util",
"log",
"nac3ast",
"num-bigint 0.4.2",
"num-traits",
"phf",
"unic-emoji-char",
"unic-ucd-ident",
@ -557,52 +548,11 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
[[package]]
name = "num-bigint"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74e768dff5fb39a41b3bcd30bb25cf989706c90d028d1ad71971987aa309d535"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
dependencies = [
"autocfg",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
dependencies = [
"autocfg",
]
[[package]]
name = "num_cpus"
version = "1.13.0"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
dependencies = [
"hermit-abi",
"libc",
@ -610,9 +560,9 @@ dependencies = [
[[package]]
name = "once_cell"
version = "1.8.0"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56"
checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5"
[[package]]
name = "parking_lot"
@ -738,9 +688,9 @@ checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468"
[[package]]
name = "ppv-lite86"
version = "0.2.10"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba"
[[package]]
name = "precomputed-hash"
@ -756,9 +706,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro2"
version = "1.0.29"
version = "1.0.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d"
checksum = "2f84e92c0f7c9d58328b85a78557813e4bd845130db68d7184635344399423b1"
dependencies = [
"unicode-xid",
]
@ -813,9 +763,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.9"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
dependencies = [
"proc-macro2",
]
@ -939,15 +889,15 @@ dependencies = [
[[package]]
name = "rustversion"
version = "1.0.5"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61b3909d758bb75c79f23d4736fac9433868679d3ad2ea7a61e3c25cfda9a088"
checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
[[package]]
name = "ryu"
version = "1.0.5"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
[[package]]
name = "scopeguard"
@ -975,18 +925,18 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.130"
version = "1.0.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913"
checksum = "8b9875c23cf305cd1fd7eb77234cbb705f21ea6a72c637a5c6db5fe4b8e7f008"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.130"
version = "1.0.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b"
checksum = "ecc0db5cb2556c0e558887d9bbdcf6ac4471e83ff66cf696e5419024d1606276"
dependencies = [
"proc-macro2",
"quote",
@ -995,9 +945,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.68"
version = "1.0.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8"
checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5"
dependencies = [
"itoa",
"ryu",
@ -1006,21 +956,21 @@ dependencies = [
[[package]]
name = "serde_yaml"
version = "0.8.21"
version = "0.8.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8c608a35705a5d3cdc9fbe403147647ff34b921f8e833e49306df898f9b20af"
checksum = "a4a521f2940385c165a24ee286aa8599633d162077a54bdcae2a6fd5a7bfa7a0"
dependencies = [
"dtoa",
"indexmap",
"ryu",
"serde",
"yaml-rust",
]
[[package]]
name = "similar"
version = "1.3.0"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ad1d488a557b235fc46dae55512ffbfc429d2482b08b4d9435ab07384ca8aec"
checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3"
[[package]]
name = "siphasher"
@ -1030,9 +980,9 @@ checksum = "533494a8f9b724d33625ab53c6c4800f7cc445895924a8ef649222dcb76e938b"
[[package]]
name = "smallvec"
version = "1.6.1"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e"
checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309"
[[package]]
name = "string-interner"
@ -1047,21 +997,22 @@ dependencies = [
[[package]]
name = "string_cache"
version = "0.8.1"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ddb1139b5353f96e429e1a5e19fbaf663bddedaa06d1dbd49f82e352601209a"
checksum = "923f0f39b6267d37d23ce71ae7235602134b250ace715dd2c90421998ddac0c6"
dependencies = [
"lazy_static",
"new_debug_unreachable",
"parking_lot",
"phf_shared 0.8.0",
"precomputed-hash",
]
[[package]]
name = "syn"
version = "1.0.76"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6f107db402c2c2055242dbf4d2af0e69197202e9faacbef9571bbe47f5a1b84"
checksum = "23a1dfb999630e338648c83e91c59a4e9fb7620f520c3194b6b89e276f2f1959"
dependencies = [
"proc-macro2",
"quote",
@ -1105,9 +1056,9 @@ dependencies = [
[[package]]
name = "test-case"
version = "1.2.0"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b114ece25254e97bf48dd4bfc2a12bad0647adacfe4cae1247a9ca6ad302cec"
checksum = "c7cad0a06f9a61e94355aa3b3dc92d85ab9c83406722b1ca5e918d4297c12c23"
dependencies = [
"cfg-if",
"proc-macro2",

View File

@ -10,4 +10,3 @@ members = [
[profile.release]
debug = true

View File

@ -2,11 +2,11 @@
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1638887115,
"narHash": "sha256-emjtIeqyJ84Eb3X7APJruTrwcfnHQKs55XGljj62prs=",
"lastModified": 1640264129,
"narHash": "sha256-gfTFmZhnHE9G+Tr0fdMmo6p/FHi5QpkO9oPPYSu6JO0=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "1bd4bbd49bef217a3d1adea43498270d6e779d65",
"rev": "1158f3463912d54cc981d61213839ec6c02570d3",
"type": "github"
},
"original": {

View File

@ -16,7 +16,6 @@
})
];
};
cargoSha256 = "sha256-otKLhr58HYMjVXAof6AdObNpggPnvK6qOl7I+4LWIP8=";
msys2-python-tar = pkgs.fetchurl {
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-python-3.9.7-4-any.pkg.tar.zst";
sha256 = "0iwlgbk4b457yn9djwqswid55xhyyi35qymz1lfh42xwdpxdm47c";
@ -54,7 +53,7 @@
pkgs.rustPlatform.buildRustPackage {
name = "nac3artiq";
src = self;
inherit cargoSha256;
cargoLock = { lockFile = ./Cargo.lock; };
nativeBuildInputs = [ pkgs.python3 llvm-nac3 ];
buildInputs = [ pkgs.python3 llvm-nac3 ];
cargoBuildFlags = [ "--package" "nac3artiq" ];
@ -67,15 +66,76 @@
'';
}
);
# LLVM PGO support
llvm-nac3-instrumented = pkgs.callPackage "${self}/llvm" {
stdenv = pkgs.llvmPackages_13.stdenv;
extraCmakeFlags = [ "-DLLVM_BUILD_INSTRUMENTED=IR" ];
};
nac3artiq-instrumented = pkgs.python3Packages.toPythonModule (
pkgs.rustPlatform.buildRustPackage {
name = "nac3artiq-instrumented";
src = self;
cargoLock = { lockFile = ./Cargo.lock; };
nativeBuildInputs = [ pkgs.python3 llvm-nac3-instrumented ];
buildInputs = [ pkgs.python3 llvm-nac3-instrumented ];
cargoBuildFlags = [ "--package" "nac3artiq" "--features" "init-llvm-profile" ];
doCheck = false;
configurePhase =
''
export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS="-C link-arg=-L${pkgs.llvmPackages_13.compiler-rt}/lib/linux -C link-arg=-lclang_rt.profile-x86_64"
'';
installPhase =
''
TARGET_DIR=$out/${pkgs.python3Packages.python.sitePackages}
mkdir -p $TARGET_DIR
cp target/x86_64-unknown-linux-gnu/release/libnac3artiq.so $TARGET_DIR/nac3artiq.so
'';
}
);
nac3artiq-profile = pkgs.stdenvNoCC.mkDerivation {
name = "nac3artiq-profile";
src = self;
buildInputs = [ (pkgs.python3.withPackages(ps: [ ps.numpy nac3artiq-instrumented ])) pkgs.lld_13 pkgs.llvmPackages_13.libllvm ];
phases = [ "buildPhase" "installPhase" ];
# TODO: get more representative code.
buildPhase = "python $src/nac3artiq/demo/demo.py";
installPhase =
''
mkdir $out
llvm-profdata merge -o $out/llvm.profdata /build/llvm/build/profiles/*
'';
};
llvm-nac3-pgo = pkgs.callPackage "${self}/llvm" {
stdenv = pkgs.llvmPackages_13.stdenv;
extraCmakeFlags = [ "-DLLVM_PROFDATA_FILE=${nac3artiq-profile}/llvm.profdata" ];
};
nac3artiq-pgo = pkgs.python3Packages.toPythonModule (
pkgs.rustPlatform.buildRustPackage {
name = "nac3artiq-pgo";
src = self;
cargoLock = { lockFile = ./Cargo.lock; };
nativeBuildInputs = [ pkgs.python3 llvm-nac3-pgo ];
buildInputs = [ pkgs.python3 llvm-nac3-pgo ];
cargoBuildFlags = [ "--package" "nac3artiq" ];
cargoTestFlags = [ "--package" "nac3ast" "--package" "nac3parser" "--package" "nac3core" "--package" "nac3artiq" ];
installPhase =
''
TARGET_DIR=$out/${pkgs.python3Packages.python.sitePackages}
mkdir -p $TARGET_DIR
cp target/x86_64-unknown-linux-gnu/release/libnac3artiq.so $TARGET_DIR/nac3artiq.so
'';
}
);
};
packages.x86_64-w64-mingw32 = rec {
llvm-nac3 = pkgs-mingw.callPackage "${self}/llvm" { inherit (pkgs) llvmPackages_12; };
llvm-nac3 = pkgs-mingw.callPackage "${self}/llvm" { inherit (pkgs) llvmPackages_13; };
nac3artiq = pkgs-mingw.python3Packages.toPythonModule (
pkgs-mingw.rustPlatform.buildRustPackage {
name = "nac3artiq";
src = self;
inherit cargoSha256;
cargoLock = { lockFile = ./Cargo.lock; };
nativeBuildInputs = [ pkgs.zip ];
buildInputs = [ pkgs-mingw.zlib ];
configurePhase =
@ -113,8 +173,8 @@
name = "nac3-dev-shell";
buildInputs = with pkgs; [
packages.x86_64-linux.llvm-nac3
clang_12
lld_12
clang_13
lld_13
cargo
cargo-insta
rustc

View File

@ -7,17 +7,18 @@
, libbfd
, ncurses
, zlib
, llvmPackages_12
, which
, llvmPackages_13
, debugVersion ? false
, enableManpages ? false
, enableSharedLibraries ? false
, enablePolly ? false
, extraCmakeFlags ? []
}:
let
inherit (lib) optional optionals optionalString;
release_version = "12.0.1";
release_version = "13.0.0";
candidate = ""; # empty or "rcN"
dash-candidate = lib.optionalString (candidate != "") "-${candidate}";
version = "${release_version}${dash-candidate}"; # differentiating these (variables) is important for RCs
@ -34,16 +35,12 @@ in stdenv.mkDerivation (rec {
pname = "llvm";
inherit version;
src = fetch pname "1pzx9zrmd7r3481sbhwvkms68fwhffpp4mmz45dgrkjpyl2q96kx";
polly_src = fetch "polly" "1yfm9ixda4a2sx7ak5vswijx4ydk5lv1c1xh39xmd2kh299y4m12";
src = fetch pname "sha256-QI0RcIZD6oJvUZ/3l2H838EtZBolECKe7EWecvgWMCA=";
unpackPhase = ''
unpackFile $src
mv llvm-${release_version}* llvm
sourceRoot=$PWD/llvm
'' + optionalString enablePolly ''
unpackFile $polly_src
mv polly-* $sourceRoot/tools/polly
'';
outputs = [ "out" "lib" "dev" "python" ];
@ -56,17 +53,18 @@ in stdenv.mkDerivation (rec {
propagatedBuildInputs = optionals (stdenv.buildPlatform == stdenv.hostPlatform) [ ncurses ]
++ [ zlib ];
checkInputs = [ which ];
patches = [
./gnu-install-dirs.patch
# On older CPUs (e.g. Hydra/wendy) we'd be getting an error in this test.
# Fix random compiler crashes: https://bugs.llvm.org/show_bug.cgi?id=50611
(fetchpatch {
name = "uops-CMOV16rm-noreg.diff";
url = "https://github.com/llvm/llvm-project/commit/9e9f991ac033.diff";
sha256 = "sha256:12s8vr6ibri8b48h2z38f3afhwam10arfiqfy4yg37bmc054p5hi";
url = "https://raw.githubusercontent.com/archlinux/svntogit-packages/4764a4f8c920912a2bfd8b0eea57273acfe0d8a8/trunk/no-strict-aliasing-DwarfCompileUnit.patch";
sha256 = "18l6mrvm2vmwm77ckcnbjvh6ybvn72rhrb799d4qzwac4x2ifl7g";
stripLen = 1;
})
./llvm-future-riscv-abi.diff
] ++ lib.optional enablePolly ./gnu-install-dirs-polly.patch;
];
postPatch = optionalString stdenv.isDarwin ''
substituteInPlace cmake/modules/AddLLVM.cmake \
@ -85,8 +83,7 @@ in stdenv.mkDerivation (rec {
substituteInPlace unittests/IR/CMakeLists.txt \
--replace "PassBuilderCallbacksTest.cpp" ""
rm unittests/IR/PassBuilderCallbacksTest.cpp
# TODO: Fix failing tests:
rm test/DebugInfo/X86/vla-multi.ll
rm test/tools/llvm-objcopy/ELF/mirror-permissions-unix.test
'' + optionalString stdenv.hostPlatform.isMusl ''
patch -p1 -i ${../../TLI-musl.patch}
substituteInPlace unittests/Support/CMakeLists.txt \
@ -139,7 +136,7 @@ in stdenv.mkDerivation (rec {
"-DCAN_TARGET_i386=false"
] ++ optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
"-DCMAKE_CROSSCOMPILING=True"
"-DLLVM_TABLEGEN=${llvmPackages_12.tools.llvm}/bin/llvm-tblgen"
"-DLLVM_TABLEGEN=${llvmPackages_13.tools.llvm}/bin/llvm-tblgen"
(
let
nativeCC = pkgsBuildBuild.targetPackages.stdenv.cc;
@ -153,7 +150,7 @@ in stdenv.mkDerivation (rec {
];
in "-DCROSS_TOOLCHAIN_FLAGS_NATIVE:list=${lib.concatStringsSep ";" nativeToolchainFlags}"
)
];
] ++ extraCmakeFlags;
postBuild = ''
rm -fR $out

View File

@ -1,105 +0,0 @@
diff --git a/tools/polly/CMakeLists.txt b/tools/polly/CMakeLists.txt
index ca7c04c565bb..6ed5db5dd4f8 100644
--- a/tools/polly/CMakeLists.txt
+++ b/tools/polly/CMakeLists.txt
@@ -2,7 +2,11 @@
if (NOT DEFINED LLVM_MAIN_SRC_DIR)
project(Polly)
cmake_minimum_required(VERSION 3.13.4)
+endif()
+
+include(GNUInstallDirs)
+if (NOT DEFINED LLVM_MAIN_SRC_DIR)
# Where is LLVM installed?
find_package(LLVM CONFIG REQUIRED)
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${LLVM_CMAKE_DIR})
@@ -122,13 +126,13 @@ include_directories(
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
install(DIRECTORY include/
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.h"
)
install(DIRECTORY ${POLLY_BINARY_DIR}/include/
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.h"
PATTERN "CMakeFiles" EXCLUDE
diff --git a/tools/polly/cmake/CMakeLists.txt b/tools/polly/cmake/CMakeLists.txt
index 7cc129ba2e90..137be25e4b80 100644
--- a/tools/polly/cmake/CMakeLists.txt
+++ b/tools/polly/cmake/CMakeLists.txt
@@ -79,18 +79,18 @@ file(GENERATE
# Generate PollyConfig.cmake for the install tree.
unset(POLLY_EXPORTS)
-set(POLLY_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
+set(POLLY_INSTALL_PREFIX "")
set(POLLY_CONFIG_LLVM_CMAKE_DIR "${LLVM_BINARY_DIR}/${LLVM_INSTALL_PACKAGE_DIR}")
-set(POLLY_CONFIG_CMAKE_DIR "${POLLY_INSTALL_PREFIX}/${POLLY_INSTALL_PACKAGE_DIR}")
-set(POLLY_CONFIG_LIBRARY_DIRS "${POLLY_INSTALL_PREFIX}/lib${LLVM_LIBDIR_SUFFIX}")
+set(POLLY_CONFIG_CMAKE_DIR "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_PREFIX}/${POLLY_INSTALL_PACKAGE_DIR}")
+set(POLLY_CONFIG_LIBRARY_DIRS "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_LIBDIR}${LLVM_LIBDIR_SUFFIX}")
if (POLLY_BUNDLED_ISL)
set(POLLY_CONFIG_INCLUDE_DIRS
- "${POLLY_INSTALL_PREFIX}/include"
- "${POLLY_INSTALL_PREFIX}/include/polly"
+ "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_LIBDIR}"
+ "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_LIBDIR}/polly"
)
else()
set(POLLY_CONFIG_INCLUDE_DIRS
- "${POLLY_INSTALL_PREFIX}/include"
+ "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_INCLUDEDIR}"
${ISL_INCLUDE_DIRS}
)
endif()
@@ -100,12 +100,12 @@ endif()
foreach(tgt IN LISTS POLLY_CONFIG_EXPORTED_TARGETS)
get_target_property(tgt_type ${tgt} TYPE)
if (tgt_type STREQUAL "EXECUTABLE")
- set(tgt_prefix "bin/")
+ set(tgt_prefix "${CMAKE_INSTALL_BINDIR}/")
else()
- set(tgt_prefix "lib/")
+ set(tgt_prefix "${CMAKE_INSTALL_LIBDIR}/")
endif()
- set(tgt_path "${CMAKE_INSTALL_PREFIX}/${tgt_prefix}$<TARGET_FILE_NAME:${tgt}>")
+ set(tgt_path "${tgt_prefix}$<TARGET_FILE_NAME:${tgt}>")
file(RELATIVE_PATH tgt_path ${POLLY_CONFIG_CMAKE_DIR} ${tgt_path})
if (NOT tgt_type STREQUAL "INTERFACE_LIBRARY")
diff --git a/tools/polly/cmake/polly_macros.cmake b/tools/polly/cmake/polly_macros.cmake
index 518a09b45a42..bd9d6f5542ad 100644
--- a/tools/polly/cmake/polly_macros.cmake
+++ b/tools/polly/cmake/polly_macros.cmake
@@ -44,8 +44,8 @@ macro(add_polly_library name)
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY OR ${name} STREQUAL "LLVMPolly")
install(TARGETS ${name}
EXPORT LLVMExports
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX}
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX})
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
endif()
set_property(GLOBAL APPEND PROPERTY LLVM_EXPORTS ${name})
endmacro(add_polly_library)
diff --git a/tools/polly/lib/External/CMakeLists.txt b/tools/polly/lib/External/CMakeLists.txt
index 8991094d92c7..178d8ad606bb 100644
--- a/tools/polly/lib/External/CMakeLists.txt
+++ b/tools/polly/lib/External/CMakeLists.txt
@@ -275,7 +275,7 @@ if (POLLY_BUNDLED_ISL)
install(DIRECTORY
${ISL_SOURCE_DIR}/include/
${ISL_BINARY_DIR}/include/
- DESTINATION include/polly
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/polly
FILES_MATCHING
PATTERN "*.h"
PATTERN "CMakeFiles" EXCLUDE

View File

@ -1,8 +1,8 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 277d0fe54d7b..af69c8be8745 100644
index 135036f509d2..265c36f8211b 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -256,15 +256,21 @@ if (CMAKE_BUILD_TYPE AND
@@ -270,15 +270,21 @@ if (CMAKE_BUILD_TYPE AND
message(FATAL_ERROR "Invalid value for CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
endif()
@ -25,7 +25,7 @@ index 277d0fe54d7b..af69c8be8745 100644
# They are used as destination of target generators.
set(LLVM_RUNTIME_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/bin)
set(LLVM_LIBRARY_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/lib${LLVM_LIBDIR_SUFFIX})
@@ -567,9 +573,9 @@ option (LLVM_ENABLE_SPHINX "Use Sphinx to generate llvm documentation." OFF)
@@ -581,9 +587,9 @@ option (LLVM_ENABLE_SPHINX "Use Sphinx to generate llvm documentation." OFF)
option (LLVM_ENABLE_OCAMLDOC "Build OCaml bindings documentation." ON)
option (LLVM_ENABLE_BINDINGS "Build bindings." ON)
@ -37,7 +37,7 @@ index 277d0fe54d7b..af69c8be8745 100644
CACHE STRING "OCamldoc-generated HTML documentation install directory")
option (LLVM_BUILD_EXTERNAL_COMPILER_RT
@@ -1027,7 +1033,7 @@ endif()
@@ -1048,7 +1054,7 @@ endif()
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
install(DIRECTORY include/llvm include/llvm-c
@ -46,7 +46,7 @@ index 277d0fe54d7b..af69c8be8745 100644
COMPONENT llvm-headers
FILES_MATCHING
PATTERN "*.def"
@@ -1038,7 +1044,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
@@ -1059,7 +1065,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
)
install(DIRECTORY ${LLVM_INCLUDE_DIR}/llvm ${LLVM_INCLUDE_DIR}/llvm-c
@ -55,7 +55,7 @@ index 277d0fe54d7b..af69c8be8745 100644
COMPONENT llvm-headers
FILES_MATCHING
PATTERN "*.def"
@@ -1052,13 +1058,13 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
@@ -1073,13 +1079,13 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
if (LLVM_INSTALL_MODULEMAPS)
install(DIRECTORY include/llvm include/llvm-c
@ -72,11 +72,11 @@ index 277d0fe54d7b..af69c8be8745 100644
RENAME "module.extern.modulemap"
)
diff --git a/cmake/modules/AddLLVM.cmake b/cmake/modules/AddLLVM.cmake
index 97c9980c7de3..409e8b615f75 100644
index 9c2b85374307..5531ceeb2eeb 100644
--- a/cmake/modules/AddLLVM.cmake
+++ b/cmake/modules/AddLLVM.cmake
@@ -804,9 +804,9 @@ macro(add_llvm_library name)
@@ -818,9 +818,9 @@ macro(add_llvm_library name)
get_target_export_arg(${name} LLVM export_to_llvmexports ${umbrella})
install(TARGETS ${name}
${export_to_llvmexports}
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
@ -88,7 +88,7 @@ index 97c9980c7de3..409e8b615f75 100644
if (NOT LLVM_ENABLE_IDE)
add_llvm_install_targets(install-${name}
@@ -1022,7 +1022,7 @@ function(process_llvm_pass_plugins)
@@ -1036,7 +1036,7 @@ function(process_llvm_pass_plugins)
"set(LLVM_STATIC_EXTENSIONS ${LLVM_STATIC_EXTENSIONS})")
install(FILES
${llvm_cmake_builddir}/LLVMConfigExtensions.cmake
@ -97,7 +97,7 @@ index 97c9980c7de3..409e8b615f75 100644
COMPONENT cmake-exports)
set(ExtensionDef "${LLVM_BINARY_DIR}/include/llvm/Support/Extension.def")
@@ -1242,7 +1242,7 @@ macro(add_llvm_example name)
@@ -1250,7 +1250,7 @@ macro(add_llvm_example name)
endif()
add_llvm_executable(${name} ${ARGN})
if( LLVM_BUILD_EXAMPLES )
@ -106,7 +106,7 @@ index 97c9980c7de3..409e8b615f75 100644
endif()
set_target_properties(${name} PROPERTIES FOLDER "Examples")
endmacro(add_llvm_example name)
@@ -1854,7 +1854,7 @@ function(llvm_install_library_symlink name dest type)
@@ -1868,7 +1868,7 @@ function(llvm_install_library_symlink name dest type)
set(full_name ${CMAKE_${type}_LIBRARY_PREFIX}${name}${CMAKE_${type}_LIBRARY_SUFFIX})
set(full_dest ${CMAKE_${type}_LIBRARY_PREFIX}${dest}${CMAKE_${type}_LIBRARY_SUFFIX})
@ -115,8 +115,8 @@ index 97c9980c7de3..409e8b615f75 100644
if(WIN32 AND "${type}" STREQUAL "SHARED")
set(output_dir bin)
endif()
@@ -1871,7 +1871,7 @@ function(llvm_install_library_symlink name dest type)
endif()
@@ -1879,7 +1879,7 @@ function(llvm_install_library_symlink name dest type)
endfunction()
-function(llvm_install_symlink name dest)
@ -124,7 +124,7 @@ index 97c9980c7de3..409e8b615f75 100644
cmake_parse_arguments(ARG "ALWAYS_GENERATE" "COMPONENT" "" ${ARGN})
foreach(path ${CMAKE_MODULE_PATH})
if(EXISTS ${path}/LLVMInstallSymlink.cmake)
@@ -1894,7 +1894,7 @@ function(llvm_install_symlink name dest)
@@ -1902,7 +1902,7 @@ function(llvm_install_symlink name dest)
set(full_dest ${dest}${CMAKE_EXECUTABLE_SUFFIX})
install(SCRIPT ${INSTALL_SYMLINK}
@ -133,7 +133,7 @@ index 97c9980c7de3..409e8b615f75 100644
COMPONENT ${component})
if (NOT LLVM_ENABLE_IDE AND NOT ARG_ALWAYS_GENERATE)
@@ -1977,7 +1977,8 @@ function(add_llvm_tool_symlink link_name target)
@@ -1985,7 +1985,8 @@ function(add_llvm_tool_symlink link_name target)
endif()
if ((TOOL_IS_TOOLCHAIN OR NOT LLVM_INSTALL_TOOLCHAIN_ONLY) AND LLVM_BUILD_TOOLS)
@ -143,12 +143,12 @@ index 97c9980c7de3..409e8b615f75 100644
endif()
endif()
endfunction()
@@ -2100,9 +2101,9 @@ function(llvm_setup_rpath name)
if (APPLE)
set(_install_name_dir INSTALL_NAME_DIR "@rpath")
- set(_install_rpath "@loader_path/../lib${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
+ set(_install_rpath "@loader_path/../${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
@@ -2114,9 +2115,9 @@ function(llvm_setup_rpath name)
# Since BUILD_SHARED_LIBS is only recommended for use by developers,
# hardcode the rpath to build/install lib dir first in this mode.
# FIXME: update this when there is better solution.
- set(_install_rpath "${LLVM_LIBRARY_OUTPUT_INTDIR}" "${CMAKE_INSTALL_PREFIX}/lib${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
+ set(_install_rpath "${LLVM_LIBRARY_OUTPUT_INTDIR}" "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
elseif(UNIX)
- set(_install_rpath "\$ORIGIN/../lib${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
+ set(_install_rpath "\$ORIGIN/../${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
@ -185,16 +185,18 @@ index e80c3b5c1cac..482f6d715ef5 100644
# '/.' indicates: copy the contents of the directory directly into
diff --git a/cmake/modules/CMakeLists.txt b/cmake/modules/CMakeLists.txt
index 505dc9a29d70..36e6c63af3f4 100644
index 51b6a4fdc284..4adc2acfc074 100644
--- a/cmake/modules/CMakeLists.txt
+++ b/cmake/modules/CMakeLists.txt
@@ -1,4 +1,4 @@
@@ -1,6 +1,6 @@
include(LLVMDistributionSupport)
-set(LLVM_INSTALL_PACKAGE_DIR lib${LLVM_LIBDIR_SUFFIX}/cmake/llvm)
+set(LLVM_INSTALL_PACKAGE_DIR ${LLVM_INSTALL_CMAKE_DIR} CACHE STRING "Path for CMake subdirectory (defaults to 'cmake/llvm')")
set(llvm_cmake_builddir "${LLVM_BINARY_DIR}/${LLVM_INSTALL_PACKAGE_DIR}")
# First for users who use an installed LLVM, create the LLVMExports.cmake file.
@@ -107,13 +107,13 @@ foreach(p ${_count})
@@ -109,13 +109,13 @@ foreach(p ${_count})
set(LLVM_CONFIG_CODE "${LLVM_CONFIG_CODE}
get_filename_component(LLVM_INSTALL_PREFIX \"\${LLVM_INSTALL_PREFIX}\" PATH)")
endforeach(p)
@ -212,32 +214,32 @@ index 505dc9a29d70..36e6c63af3f4 100644
# Generate a default location for lit
if (LLVM_INSTALL_UTILS AND LLVM_BUILD_UTILS)
diff --git a/cmake/modules/LLVMInstallSymlink.cmake b/cmake/modules/LLVMInstallSymlink.cmake
index 09fed8085c23..aa79f192abf0 100644
index 3e6a2c9a2648..52e14d955c60 100644
--- a/cmake/modules/LLVMInstallSymlink.cmake
+++ b/cmake/modules/LLVMInstallSymlink.cmake
@@ -10,7 +10,7 @@ function(install_symlink name target outdir)
set(LINK_OR_COPY copy)
endif()
@@ -4,7 +4,7 @@
function(install_symlink name target outdir)
set(DESTDIR $ENV{DESTDIR})
- set(bindir "${DESTDIR}${CMAKE_INSTALL_PREFIX}/${outdir}/")
+ set(bindir "${DESTDIR}${outdir}/")
message(STATUS "Creating ${name}")
diff --git a/docs/CMake.rst b/docs/CMake.rst
index bb821b417ad9..6a528f7c2ad3 100644
index f1ac2c7d4934..c6e1469b5e13 100644
--- a/docs/CMake.rst
+++ b/docs/CMake.rst
@@ -196,7 +196,7 @@ CMake manual, or execute ``cmake --help-variable VARIABLE_NAME``.
@@ -202,7 +202,7 @@ CMake manual, or execute ``cmake --help-variable VARIABLE_NAME``.
**LLVM_LIBDIR_SUFFIX**:STRING
Extra suffix to append to the directory where libraries are to be
installed. On a 64-bit architecture, one could use ``-DLLVM_LIBDIR_SUFFIX=64``
- to install libraries to ``/usr/lib64``.
+ to install libraries to ``/usr/lib64``. See also ``CMAKE_INSTALL_LIBDIR``.
**CMAKE_C_FLAGS**:STRING
Extra flags to use when compiling C source files.
@@ -550,8 +550,8 @@ LLVM-specific variables
Rarely-used CMake variables
---------------------------
@@ -551,8 +551,8 @@ LLVM-specific variables
**LLVM_INSTALL_DOXYGEN_HTML_DIR**:STRING
The path to install Doxygen-generated HTML documentation to. This path can
@ -246,47 +248,9 @@ index bb821b417ad9..6a528f7c2ad3 100644
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
+ `${CMAKE_INSTALL_DOCDIR}/${project}/doxygen-html`.
**LLVM_ENABLE_SPHINX**:BOOL
If specified, CMake will search for the ``sphinx-build`` executable and will make
@@ -582,13 +582,33 @@ LLVM-specific variables
**LLVM_INSTALL_SPHINX_HTML_DIR**:STRING
The path to install Sphinx-generated HTML documentation to. This path can
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
- `share/doc/llvm/html`.
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
+ `${CMAKE_INSTALL_DOCDIR}/${project}/html`.
**LLVM_INSTALL_OCAMLDOC_HTML_DIR**:STRING
The path to install OCamldoc-generated HTML documentation to. This path can
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
- `share/doc/llvm/ocaml-html`.
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
+ `${CMAKE_INSTALL_DOCDIR}/${project}/ocaml-html`.
+
+**CMAKE_INSTALL_BINDIR**:STRING
+ The path to install binary tools, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `bin`.
+
+**CMAKE_INSTALL_LIBDIR**:STRING
+ The path to install libraries, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `lib`.
+
+**CMAKE_INSTALL_INCLUDEDIR**:STRING
+ The path to install header files, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `include`.
+
+**CMAKE_INSTALL_DOCDIR**:STRING
+ The path to install documentation, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `share/doc`.
+
+**CMAKE_INSTALL_MANDIR**:STRING
+ The path to install manpage files, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `share/man`.
**LLVM_CREATE_XCODE_TOOLCHAIN**:BOOL
macOS Only: If enabled CMake will generate a target named
@@ -786,9 +806,11 @@ the ``cmake`` command or by setting it directly in ``ccmake`` or ``cmake-gui``).
**LLVM_LINK_LLVM_DYLIB**:BOOL
If enabled, tools will be linked with the libLLVM shared library. Defaults
@@ -792,9 +792,11 @@ the ``cmake`` command or by setting it directly in ``ccmake`` or ``cmake-gui``).
This file is available in two different locations.
@ -377,11 +341,11 @@ index 1a2f04552d13..44fa7d3eec6b 100644
}
diff --git a/tools/lto/CMakeLists.txt b/tools/lto/CMakeLists.txt
index 2963f97cad88..69d66c9c9ca1 100644
index 0af29ad762c5..37b99b83e35c 100644
--- a/tools/lto/CMakeLists.txt
+++ b/tools/lto/CMakeLists.txt
@@ -25,7 +25,7 @@ add_llvm_library(LTO SHARED INSTALL_WITH_TOOLCHAIN ${SOURCES} DEPENDS
intrinsics_gen)
@@ -33,7 +33,7 @@ add_llvm_library(${LTO_LIBRARY_NAME} ${LTO_LIBRARY_TYPE} INSTALL_WITH_TOOLCHAIN
${SOURCES} DEPENDS intrinsics_gen)
install(FILES ${LLVM_MAIN_INCLUDE_DIR}/llvm-c/lto.h
- DESTINATION include/llvm-c

View File

@ -16,7 +16,9 @@ nac3parser = { path = "../nac3parser" }
nac3core = { path = "../nac3core" }
[dependencies.inkwell]
git = "https://github.com/TheDan64/inkwell"
branch = "master"
version = "0.1.0-beta.4"
default-features = false
features = ["llvm12-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
features = ["llvm13-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
[features]
init-llvm-profile = []

View File

@ -57,7 +57,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
});
let max = ctx
.builder
.build_call(smax, &[old_end, now], "smax")
.build_call(smax, &[old_end.into(), now.into()], "smax")
.try_as_basic_value()
.left()
.unwrap();
@ -165,7 +165,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
});
let max = ctx
.builder
.build_call(smax, &[end_val, outer_end_val], "smax")
.build_call(smax, &[end_val.into(), outer_end_val.into()], "smax")
.try_as_basic_value()
.left()
.unwrap();
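
Reviewer note: inkwell 0.1.0-beta.4 changes build_call to take &[BasicMetadataValueEnum] instead of &[BasicValueEnum], which is why every call site in this diff gains an .into(). A minimal standalone sketch of the new API follows; the function names and body are illustrative and not part of this PR.

// sketch against inkwell 0.1.0-beta.4 / LLVM 13 (feature "llvm13-0"); not from this diff
use inkwell::context::Context;

fn main() {
    let context = Context::create();
    let module = context.create_module("demo");
    let builder = context.create_builder();
    let i64_t = context.i64_type();

    // declare i64 @llvm.smax.i64(i64, i64)
    let smax_ty = i64_t.fn_type(&[i64_t.into(), i64_t.into()], false);
    let smax = module.add_function("llvm.smax.i64", smax_ty, None);

    // define a caller and position the builder in its entry block
    let caller = module.add_function("caller", smax_ty, None);
    let entry = context.append_basic_block(caller, "entry");
    builder.position_at_end(entry);

    let a = caller.get_nth_param(0).unwrap();
    let b = caller.get_nth_param(1).unwrap();
    // the .into() calls convert BasicValueEnum into BasicMetadataValueEnum, as in the hunk above
    let max = builder
        .build_call(smax, &[a.into(), b.into()], "smax")
        .try_as_basic_value()
        .left()
        .unwrap();
    builder.build_return(Some(&max));
}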

View File

@ -91,16 +91,18 @@ impl Nac3 {
) -> PyResult<()> {
let (module_name, source_file) = Python::with_gil(|py| -> PyResult<(String, String)> {
let module: &PyAny = module.extract(py)?;
let os_mod = PyModule::import(py, "os")?;
let path_fun = os_mod.getattr("path")?.getattr("abspath")?;
Ok((
module.getattr("__name__")?.extract()?,
module.getattr("__file__")?.extract()?,
path_fun.call1((module.getattr("__file__")?,))?.extract()?
))
})?;
let source = fs::read_to_string(source_file).map_err(|e| {
let source = fs::read_to_string(&source_file).map_err(|e| {
exceptions::PyIOError::new_err(format!("failed to read input file: {}", e))
})?;
let parser_result = parser::parse_program(&source)
let parser_result = parser::parse_program(&source, source_file.into())
.map_err(|e| exceptions::PySyntaxError::new_err(format!("parse error: {}", e)))?;
for mut stmt in parser_result.into_iter() {
@ -437,7 +439,7 @@ impl Nac3 {
let (name, def_id, ty) = composer
.register_top_level(stmt.clone(), Some(resolver.clone()), path.clone())
.unwrap();
.map_err(|e| exceptions::PyRuntimeError::new_err(format!("nac3 compilation failure: {}", e)))?;
let id = *name_to_pyid.get(&name).unwrap();
id_to_def.insert(id, def_id);
if let Some(ty) = ty {
@ -476,7 +478,7 @@ impl Nac3 {
arg_names.join(", ")
)
};
let mut synthesized = parse_program(&synthesized).unwrap();
let mut synthesized = parse_program(&synthesized, Default::default()).unwrap();
let resolver = Arc::new(Resolver(Arc::new(InnerResolver {
id_to_type: self.builtins_ty.clone().into(),
id_to_def: self.builtins_def.clone().into(),
@ -515,7 +517,9 @@ impl Nac3 {
);
let signature = store.add_cty(signature);
composer.start_analysis(true).unwrap();
composer.start_analysis(true).map_err(|e| exceptions::PyRuntimeError::new_err(format!(
"nac3 compilation failure: {}", e
)))?;
let top_level = Arc::new(composer.make_top_level_context());
let instance = {
let defs = top_level.definitions.read();
@ -683,8 +687,18 @@ impl Nac3 {
}
}
#[cfg(feature = "init-llvm-profile")]
extern "C" {
fn __llvm_profile_initialize();
}
#[pymodule]
fn nac3artiq(_py: Python, m: &PyModule) -> PyResult<()> {
#[cfg(feature = "init-llvm-profile")]
unsafe {
__llvm_profile_initialize();
}
Target::initialize_all(&InitializationConfig::default());
m.add_class::<Nac3>()?;
Ok(())
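
Reviewer note: nac3parser::parser::parse_program now takes the source file name as a second argument so that error locations carry it; the hunk above resolves __file__ to an absolute path and passes it through. A hedged usage sketch, with "demo.py" as an illustrative name only:

// sketch only; assumes the nac3parser crate as modified on this branch
use nac3parser::parser::parse_program;

fn main() {
    let source = "def f(x: int32) -> int32:\n    return x\n";
    // pass the file name so diagnostics read "demo.py:row:col";
    // Default::default() keeps the previous behaviour ("unknown")
    let ast = parse_program(source, "demo.py".to_string().into()).unwrap();
    println!("parsed {} top-level statements", ast.len());
}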

View File

@ -332,7 +332,7 @@ impl TimeFns for ExternTimeFns {
.get_function("at_mu")
.unwrap_or_else(|| ctx.module.add_function("at_mu", ctx.ctx.void_type().fn_type(&[ctx.ctx.i64_type().into()], false), None));
ctx.builder
.build_call(at_mu, &[t], "at_mu");
.build_call(at_mu, &[t.into()], "at_mu");
}
fn emit_delay_mu<'ctx, 'a>(&self, ctx: &mut CodeGenContext<'ctx, 'a>, dt: BasicValueEnum<'ctx>) {
@ -341,7 +341,7 @@ impl TimeFns for ExternTimeFns {
.get_function("delay_mu")
.unwrap_or_else(|| ctx.module.add_function("delay_mu", ctx.ctx.void_type().fn_type(&[ctx.ctx.i64_type().into()], false), None));
ctx.builder
.build_call(delay_mu, &[dt], "delay_mu");
.build_call(delay_mu, &[dt.into()], "delay_mu");
}
}

View File

@ -10,7 +10,6 @@ constant-optimization = ["fold"]
fold = []
[dependencies]
num-bigint = "0.4.0"
lazy_static = "1.4.0"
parking_lot = "0.11.1"
string-interner = "0.13.0"

View File

@ -1,12 +1,10 @@
use num_bigint::BigInt;
#[derive(Clone, Debug, PartialEq)]
pub enum Constant {
None,
Bool(bool),
Str(String),
Bytes(Vec<u8>),
Int(BigInt),
Int(Option<i64>),
Tuple(Vec<Constant>),
Float(f64),
Complex { real: f64, imag: f64 },
@ -28,9 +26,14 @@ impl From<bool> for Constant {
Self::Bool(b)
}
}
impl From<BigInt> for Constant {
fn from(i: BigInt) -> Constant {
Self::Int(i)
impl From<i32> for Constant {
fn from(i: i32) -> Constant {
Self::Int(Some(i as i64))
}
}
impl From<i64> for Constant {
fn from(i: i64) -> Constant {
Self::Int(Some(i))
}
}
@ -124,7 +127,7 @@ mod tests {
use crate::fold::Fold;
use crate::*;
let location = Location::new(0, 0);
let location = Location::new(0, 0, Default::default());
let custom = ();
let ast = Located {
location,
@ -136,7 +139,7 @@ mod tests {
location,
custom,
node: ExprKind::Constant {
value: BigInt::from(1).into(),
value: 1.into(),
kind: None,
},
},
@ -144,7 +147,7 @@ mod tests {
location,
custom,
node: ExprKind::Constant {
value: BigInt::from(2).into(),
value: 2.into(),
kind: None,
},
},
@ -158,7 +161,7 @@ mod tests {
location,
custom,
node: ExprKind::Constant {
value: BigInt::from(3).into(),
value: 3.into(),
kind: None,
},
},
@ -166,7 +169,7 @@ mod tests {
location,
custom,
node: ExprKind::Constant {
value: BigInt::from(4).into(),
value: 4.into(),
kind: None,
},
},
@ -174,7 +177,7 @@ mod tests {
location,
custom,
node: ExprKind::Constant {
value: BigInt::from(5).into(),
value: 5.into(),
kind: None,
},
},
@ -194,12 +197,12 @@ mod tests {
custom,
node: ExprKind::Constant {
value: Constant::Tuple(vec![
BigInt::from(1).into(),
BigInt::from(2).into(),
1.into(),
2.into(),
Constant::Tuple(vec![
BigInt::from(3).into(),
BigInt::from(4).into(),
BigInt::from(5).into(),
3.into(),
4.into(),
5.into(),
])
]),
kind: None

View File

@ -9,6 +9,6 @@ mod impls;
mod location;
pub use ast_gen::*;
pub use location::Location;
pub use location::{Location, FileName};
pub type Suite<U = ()> = Vec<Stmt<U>>;

View File

@ -1,17 +1,32 @@
//! Datatypes to support source location information.
use crate::ast_gen::StrRef;
use std::fmt;
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct FileName(StrRef);
impl Default for FileName {
fn default() -> Self {
FileName("unknown".into())
}
}
impl From<String> for FileName {
fn from(s: String) -> Self {
FileName(s.into())
}
}
/// A location somewhere in the sourcecode.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct Location {
row: usize,
column: usize,
file: FileName
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "line {} column {}", self.row, self.column)
write!(f, "{}:{}:{}", self.file.0, self.row, self.column)
}
}
@ -47,8 +62,8 @@ impl Location {
}
impl Location {
pub fn new(row: usize, column: usize) -> Self {
Location { row, column }
pub fn new(row: usize, column: usize, file: FileName) -> Self {
Location { row, column, file }
}
pub fn row(&self) -> usize {

View File

@ -5,8 +5,6 @@ authors = ["M-Labs"]
edition = "2018"
[dependencies]
num-bigint = "0.3"
num-traits = "0.2"
itertools = "0.10.1"
crossbeam = "0.8.1"
parking_lot = "0.11.1"
@ -14,10 +12,9 @@ rayon = "1.5.1"
nac3parser = { path = "../nac3parser" }
[dependencies.inkwell]
git = "https://github.com/TheDan64/inkwell"
branch = "master"
version = "0.1.0-beta.4"
default-features = false
features = ["llvm12-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
features = ["llvm13-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
[dev-dependencies]
test-case = "1.2.0"

View File

@ -115,7 +115,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
let ty = self.ctx.bool_type();
ty.const_int(if *v { 1 } else { 0 }, false).into()
}
Constant::Int(v) => {
Constant::Int(Some(val)) => {
let ty = if self.unifier.unioned(ty, self.primitives.int32) {
self.ctx.i32_type()
} else if self.unifier.unioned(ty, self.primitives.int64) {
@ -123,8 +123,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
} else {
unreachable!();
};
let val: i64 = v.try_into().unwrap();
ty.const_int(val as u64, false).into()
ty.const_int(*val as u64, false).into()
}
Constant::Float(v) => {
assert!(self.unifier.unioned(ty, self.primitives.float));
@ -411,7 +410,7 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator + ?Sized>(
format!("{}:{}", id, old_key)
};
param_vals =
real_params.into_iter().map(|p| p.to_basic_value_enum(ctx)).collect_vec();
real_params.into_iter().map(|p| p.to_basic_value_enum(ctx).into()).collect_vec();
instance_to_symbol.get(&key).cloned()
}
TopLevelDef::Class { .. } => {
@ -427,7 +426,7 @@ pub fn gen_call<'ctx, 'a, G: CodeGenerator + ?Sized>(
if let Some(obj) = &obj {
args.insert(0, FuncArg { name: "self".into(), ty: obj.0, default_value: None });
}
let params = args.iter().map(|arg| ctx.get_llvm_type(arg.ty)).collect_vec();
let params = args.iter().map(|arg| ctx.get_llvm_type(arg.ty).into()).collect_vec();
let fun_ty = if ctx.unifier.unioned(fun.0.ret, ctx.primitives.none) {
ctx.ctx.void_type().fn_type(&params, false)
} else {

View File

@ -351,7 +351,7 @@ pub fn gen_func<'ctx, G: CodeGenerator + ?Sized>(
let params = args
.iter()
.map(|arg| {
get_llvm_type(context, &mut unifier, top_level_ctx.as_ref(), &mut type_cache, arg.ty)
get_llvm_type(context, &mut unifier, top_level_ctx.as_ref(), &mut type_cache, arg.ty).into()
})
.collect_vec();

View File

@ -74,7 +74,7 @@ fn test_primitives() {
d = a if c == 1 else 0
return d
"};
let statements = parse_program(source).unwrap();
let statements = parse_program(source, Default::default()).unwrap();
let composer: TopLevelComposer = Default::default();
let mut unifier = composer.unifier.clone();
@ -193,12 +193,12 @@ fn test_simple_call() {
a = foo(a)
return a * 2
"};
let statements_1 = parse_program(source_1).unwrap();
let statements_1 = parse_program(source_1, Default::default()).unwrap();
let source_2 = indoc! { "
return a + 1
"};
let statements_2 = parse_program(source_2).unwrap();
let statements_2 = parse_program(source_2, Default::default()).unwrap();
let composer: TopLevelComposer = Default::default();
let mut unifier = composer.unifier.clone();

View File

@ -298,10 +298,10 @@ pub fn parse_type_annotation<T>(
if let Name { id, .. } = &value.node {
subscript_name_handle(id, slice, unifier)
} else {
Err("unsupported type expression".into())
Err(format!("unsupported type expression at {}", expr.location))
}
}
_ => Err("unsupported type expression".into()),
_ => Err(format!("unsupported type expression at {}", expr.location)),
}
}

View File

@ -214,7 +214,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
});
let val = ctx
.builder
.build_call(round_intrinsic, &[arg], "round")
.build_call(round_intrinsic, &[arg.into()], "round")
.try_as_basic_value()
.left()
.unwrap();
@ -251,7 +251,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
});
let val = ctx
.builder
.build_call(round_intrinsic, &[arg], "round")
.build_call(round_intrinsic, &[arg.into()], "round")
.try_as_basic_value()
.left()
.unwrap();
@ -431,7 +431,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
});
let val = ctx
.builder
.build_call(floor_intrinsic, &[arg], "floor")
.build_call(floor_intrinsic, &[arg.into()], "floor")
.try_as_basic_value()
.left()
.unwrap();
@ -468,7 +468,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
});
let val = ctx
.builder
.build_call(floor_intrinsic, &[arg], "floor")
.build_call(floor_intrinsic, &[arg.into()], "floor")
.try_as_basic_value()
.left()
.unwrap();
@ -505,7 +505,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
});
let val = ctx
.builder
.build_call(ceil_intrinsic, &[arg], "ceil")
.build_call(ceil_intrinsic, &[arg.into()], "ceil")
.try_as_basic_value()
.left()
.unwrap();
@ -542,7 +542,7 @@ pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
});
let val = ctx
.builder
.build_call(ceil_intrinsic, &[arg], "ceil")
.build_call(ceil_intrinsic, &[arg.into()], "ceil")
.try_as_basic_value()
.left()
.unwrap();

View File

@ -162,14 +162,22 @@ impl TopLevelComposer {
match &ast.node {
ast::StmtKind::ClassDef { name: class_name, body, .. } => {
if self.keyword_list.contains(class_name) {
return Err("cannot use keyword as a class name".into());
return Err(format!(
"cannot use keyword `{}` as a class name (at {})",
class_name,
ast.location
));
}
if !defined_names.insert({
let mut n = mod_path.clone();
n.push_str(&class_name.to_string());
n
}) {
return Err("duplicate definition of class".into());
return Err(format!(
"duplicate definition of class `{}` (at {})",
class_name,
ast.location
));
}
let class_name = *class_name;
@ -212,7 +220,11 @@ impl TopLevelComposer {
contains_constructor = true;
}
if self.keyword_list.contains(method_name) {
return Err("cannot use keyword as a method name".into());
return Err(format!(
"cannot use keyword `{}` as a method name (at {})",
method_name,
b.location
));
}
let global_class_method_name = {
let mut n = mod_path.clone();
@ -226,7 +238,11 @@ impl TopLevelComposer {
n
};
if !defined_names.insert(global_class_method_name.clone()) {
return Err("duplicate class method definition".into());
return Err(format!(
"class method `{}` defined twice (at {})",
&global_class_method_name[mod_path.len()..],
b.location
));
}
let method_def_id = self.definition_ast_list.len() + {
// plus 1 here since we already have the class def
@ -283,12 +299,16 @@ impl TopLevelComposer {
// return Err("cannot use keyword as a top level function name".into());
// }
let global_fun_name = {
let mut n = mod_path;
let mut n = mod_path.clone();
n.push_str(&name.to_string());
n
};
if !defined_names.insert(global_fun_name.clone()) {
return Err("duplicate top level function define".into());
return Err(format!(
"top level function `{}` defined twice (at {})",
&global_fun_name[mod_path.len()..],
ast.location
));
}
let fun_name = *name;
@ -314,7 +334,10 @@ impl TopLevelComposer {
))
}
_ => Err("only registrations of top level classes/functions are supported".into()),
_ => Err(format!(
"registrations of constructs other than top level classes/functions are not supported (at {})",
ast.location
)),
}
}
@ -376,7 +399,10 @@ impl TopLevelComposer {
if !is_generic {
is_generic = true;
} else {
return Err("Only single Generic[...] can be in bases".into());
return Err(format!(
"only single Generic[...] is allowed (at {})",
b.location
));
}
let type_var_list: Vec<&ast::Expr<()>>;
@ -414,7 +440,10 @@ impl TopLevelComposer {
})
};
if !all_unique_type_var {
return Err("expect unique type variables".into());
return Err(format!(
"duplicate type variable occurs (at {})",
slice.location
));
}
// add to TopLevelDef
@ -479,9 +508,11 @@ impl TopLevelComposer {
}
if has_base {
return Err("a class def can only have at most one base class \
declaration and one generic declaration"
.into());
return Err(format!(
"a class definition can only have at most one base class \
declaration and one generic declaration (at {})",
b.location
));
}
has_base = true;
@ -499,7 +530,10 @@ impl TopLevelComposer {
if let TypeAnnotation::CustomClass { .. } = &base_ty {
class_ancestors.push(base_ty);
} else {
return Err("class base declaration can only be custom class".into());
return Err(format!(
"class base declaration can only be custom class (at {})",
b.location,
));
}
}
}
@ -668,14 +702,14 @@ impl TopLevelComposer {
let arg_types = {
// make sure no duplicate parameter
let mut defined_paramter_name: HashSet<_> = HashSet::new();
let have_unique_fuction_parameter_name = args.args.iter().all(|x| {
defined_paramter_name.insert(x.node.arg)
&& !keyword_list.contains(&x.node.arg)
});
if !have_unique_fuction_parameter_name {
return Err("top level function must have unique parameter names \
and names thould not be the same as the keywords"
.into());
for x in args.args.iter() {
if !defined_paramter_name.insert(x.node.arg) || keyword_list.contains(&x.node.arg) {
return Err(format!(
"top level function must have unique parameter names \
and names should not be the same as the keywords (at {})",
x.location
));
}
}
let arg_with_default: Vec<(&ast::Located<ast::ArgData<()>>, Option<&ast::Expr>)> = args
@ -754,7 +788,7 @@ impl TopLevelComposer {
&type_annotation,
primitives_store,
unifier
).map_err(|err| format!("{} at {}", err, x.location))?;
).map_err(|err| format!("{} (at {})", err, x.location))?;
v
})
}
@ -825,7 +859,7 @@ impl TopLevelComposer {
));
unifier
.unify(*dummy_ty, function_ty)
.map_err(|old| format!("{} at {}", old, function_ast.location))?;
.map_err(|old| format!("{} (at {})", old, function_ast.location))?;
} else {
unreachable!("must be both function");
}
@ -902,20 +936,22 @@ impl TopLevelComposer {
// check method parameters cannot have same name
let mut defined_paramter_name: HashSet<_> = HashSet::new();
let zelf: StrRef = "self".into();
let have_unique_fuction_parameter_name = args.args.iter().all(|x| {
defined_paramter_name.insert(x.node.arg)
&& (!keyword_list.contains(&x.node.arg) || x.node.arg == zelf)
});
if !have_unique_fuction_parameter_name {
return Err("class method must have unique parameter names \
and names thould not be the same as the keywords"
.into());
for x in args.args.iter() {
if !defined_paramter_name.insert(x.node.arg)
|| (keyword_list.contains(&x.node.arg) && x.node.arg != zelf) {
return Err(format!(
"top level function must have unique parameter names \
and names should not be the same as the keywords (at {})",
x.location
))
}
}
if name == &"__init__".into() && !defined_paramter_name.contains(&zelf) {
return Err("__init__ function must have a `self` parameter".into());
return Err(format!("__init__ method must have a `self` parameter (at {})", b.location));
}
if !defined_paramter_name.contains(&zelf) {
return Err("currently does not support static method".into());
return Err(format!("class method must have a `self` parameter (at {})", b.location));
}
let mut result = Vec::new();
@ -981,12 +1017,12 @@ impl TopLevelComposer {
None => None,
Some(default) => {
if name == "self".into() {
return Err(format!("`self` parameter cannot take default value at {}", x.location));
return Err(format!("`self` parameter cannot take default value (at {})", x.location));
}
Some({
let v = Self::parse_parameter_default_value(default, class_resolver)?;
Self::check_default_param_type(&v, &type_ann, primitives, unifier)
.map_err(|err| format!("{} at {}", err, x.location))?;
.map_err(|err| format!("{} (at {})", err, x.location))?;
v
})
}
@ -1090,7 +1126,7 @@ impl TopLevelComposer {
};
class_fields_def.push((*attr, dummy_field_type, mutable));
let annotation = parse_ast_to_type_annotation_kinds(
let parsed_annotation = parse_ast_to_type_annotation_kinds(
class_resolver,
temp_def_list,
unifier,
@ -1100,30 +1136,42 @@ impl TopLevelComposer {
)?;
// find type vars within this return type annotation
let type_vars_within =
get_type_var_contained_in_type_annotation(&annotation);
get_type_var_contained_in_type_annotation(&parsed_annotation);
// handle the class type var and the method type var
for type_var_within in type_vars_within {
if let TypeAnnotation::TypeVar(t) = type_var_within {
if !class_type_vars_def.contains(&t) {
return Err("class fields can only use type \
vars declared as class generic type vars"
.into());
return Err(format!(
"class fields can only use type \
vars declared as class generic type vars (at {})",
annotation.location
));
}
} else {
unreachable!("must be type var annotation");
}
}
type_var_to_concrete_def.insert(dummy_field_type, annotation);
type_var_to_concrete_def.insert(dummy_field_type, parsed_annotation);
} else {
return Err("same class fields defined twice".into());
return Err(format!(
"same class fields `{}` defined twice (at {})",
attr,
target.location
));
}
} else {
return Err("unsupported statement type in class definition body".into());
return Err(format!(
"unsupported statement type in class definition body (at {})",
target.location
));
}
}
ast::StmtKind::Pass { .. } => {}
ast::StmtKind::Expr { value: _, .. } => {} // typically a docstring; ignoring all expressions matches CPython behavior
_ => return Err("unsupported statement type in class definition body".into()),
_ => return Err(format!(
"unsupported statement in class definition body (at {})",
b.location
)),
}
}
Ok(())
@ -1186,7 +1234,10 @@ impl TopLevelComposer {
type_var_to_concrete_def,
);
if !ok {
return Err("method has same name as ancestors' method, but incompatible type".into());
return Err(format!(
"method {} has same name as ancestors' method, but incompatible type",
class_method_name
));
}
// mark it as added
is_override.insert(*class_method_name);
@ -1294,7 +1345,7 @@ impl TopLevelComposer {
));
self.unifier
.unify(constructor.unwrap(), contor_type)
.map_err(|old| format!("{} at {}", old, ast.as_ref().unwrap().location))?;
.map_err(|old| format!("{} (at {})", old, ast.as_ref().unwrap().location))?;
// class field instantiation check
if let (Some(init_id), false) = (init_id, fields.is_empty()) {
@ -1308,9 +1359,10 @@ impl TopLevelComposer {
for (f, _, _) in fields {
if !all_inited.contains(f) {
return Err(format!(
"fields `{}` of class `{}` not fully initialized",
"fields `{}` of class `{}` not fully initialized in the initializer (at {})",
f,
class_name
class_name,
body[0].location,
));
}
}
@ -1510,7 +1562,7 @@ impl TopLevelComposer {
&mut |id| format!("tvar{}", id),
);
return Err(format!(
"expected return type of `{}` in function `{}` at {}",
"expected return type of `{}` in function `{}` (at {})",
ret_str,
name,
ast.as_ref().unwrap().location
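
Reviewer note: the composer errors above now interpolate the offending name and its source location. A hedged sketch of triggering one of them; the import path and wiring are approximate and the symbol resolver is omitted:

// sketch only; assumes nac3core and nac3parser from this branch (module paths approximate)
use nac3core::toplevel::composer::TopLevelComposer;
use nac3parser::parser::parse_program;

fn main() {
    let mut composer: TopLevelComposer = Default::default();
    let source = "class A:\n    pass\n";
    // registering the same class twice should hit the new duplicate-definition error,
    // e.g. "duplicate definition of class `A` (at unknown:1:1)"
    for _ in 0..2 {
        let ast = parse_program(source, Default::default()).unwrap()[0].clone();
        if let Err(e) = composer.register_top_level(ast, None, "".into()) {
            println!("{}", e);
        }
    }
}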

View File

@ -412,13 +412,23 @@ pub fn parse_parameter_default_value(default: &ast::Expr, resolver: &(dyn Symbol
fn handle_constant(val: &Constant, loc: &Location) -> Result<SymbolValue, String> {
match val {
Constant::Int(v) => {
if let Ok(v) = v.try_into() {
Ok(SymbolValue::I32(v))
} else {
Err(format!(
"integer value out of range at {}",
loc
))
match v {
Some(v) => {
if let Ok(v) = (*v).try_into() {
Ok(SymbolValue::I32(v))
} else {
Err(format!(
"integer value out of range at {}",
loc
))
}
},
None => {
Err(format!(
"integer value out of range at {}",
loc
))
}
}
}
Constant::Float(v) => Ok(SymbolValue::Double(*v)),
@ -439,8 +449,8 @@ pub fn parse_parameter_default_value(default: &ast::Expr, resolver: &(dyn Symbol
} => {
if args.len() == 1 {
match &args[0].node {
ast::ExprKind::Constant { value: Constant::Int(v), .. } =>
Ok(SymbolValue::I64(v.try_into().unwrap())),
ast::ExprKind::Constant { value: Constant::Int(Some(v)), .. } =>
Ok(SymbolValue::I64(*v)),
_ => Err(format!("only allow constant integer here at {}", default.location))
}
} else {
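Because the lexer now stores integer literals as `Option<i64>` (with `None` meaning the literal already overflowed i64), the `None` case and an i64 value that does not fit in i32 both collapse into the same "out of range" error in `handle_constant`. A standalone sketch of that rule, with a hypothetical helper name:

```rust
// Standalone sketch; `int_default_to_i32` is a hypothetical helper, not the
// actual nac3 function.
fn int_default_to_i32(val: Option<i64>, loc: &str) -> Result<i32, String> {
    // None = the literal already overflowed i64 during lexing.
    val.and_then(|v| i32::try_from(v).ok())
        .ok_or_else(|| format!("integer value out of range at {}", loc))
}

fn main() {
    assert_eq!(int_default_to_i32(Some(42), "demo.py:1:1"), Ok(42));
    assert!(int_default_to_i32(Some(1 << 40), "demo.py:2:1").is_err()); // fits i64, not i32
    assert!(int_default_to_i32(None, "demo.py:3:1").is_err());          // overflowed i64
}
```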

View File

@ -103,7 +103,7 @@ fn test_simple_register(source: Vec<&str>) {
let mut composer: TopLevelComposer = Default::default();
for s in source {
let ast = parse_program(s).unwrap();
let ast = parse_program(s, Default::default()).unwrap();
let ast = ast[0].clone();
composer.register_top_level(ast, None, "".into()).unwrap();
@ -149,7 +149,7 @@ fn test_simple_function_analyze(source: Vec<&str>, tys: Vec<&str>, names: Vec<&s
Arc::new(Resolver(internal_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;
for s in source {
let ast = parse_program(s).unwrap();
let ast = parse_program(s, Default::default()).unwrap();
let ast = ast[0].clone();
let (id, def_id, ty) =
@ -333,7 +333,7 @@ fn test_simple_function_analyze(source: Vec<&str>, tys: Vec<&str>, names: Vec<&s
pass
"}
],
vec!["application of type vars to generic class is not currently supported"];
vec!["application of type vars to generic class is not currently supported (at unknown:4:24)"];
"err no type var in generic app"
)]
#[test_case(
@ -389,7 +389,7 @@ fn test_simple_function_analyze(source: Vec<&str>, tys: Vec<&str>, names: Vec<&s
def __init__():
pass
"}],
vec!["__init__ function must have a `self` parameter"];
vec!["__init__ method must have a `self` parameter (at unknown:2:5)"];
"err no self_1"
)]
#[test_case(
@ -411,7 +411,7 @@ fn test_simple_function_analyze(source: Vec<&str>, tys: Vec<&str>, names: Vec<&s
"}
],
vec!["a class def can only have at most one base class declaration and one generic declaration"];
vec!["a class definition can only have at most one base class declaration and one generic declaration (at unknown:1:24)"];
"err multiple inheritance"
)]
#[test_case(
@ -436,7 +436,7 @@ fn test_simple_function_analyze(source: Vec<&str>, tys: Vec<&str>, names: Vec<&s
pass
"}
],
vec!["method has same name as ancestors' method, but incompatible type"];
vec!["method fun has same name as ancestors' method, but incompatible type"];
"err_incompatible_inheritance_method"
)]
#[test_case(
@ -479,7 +479,7 @@ fn test_simple_function_analyze(source: Vec<&str>, tys: Vec<&str>, names: Vec<&s
pass
"}
],
vec!["duplicate definition of class"];
vec!["duplicate definition of class `A` (at unknown:1:1)"];
"class same name"
)]
fn test_analyze(source: Vec<&str>, res: Vec<&str>) {
@ -499,7 +499,7 @@ fn test_analyze(source: Vec<&str>, res: Vec<&str>) {
Arc::new(Resolver(internal_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;
for s in source {
let ast = parse_program(s).unwrap();
let ast = parse_program(s, Default::default()).unwrap();
let ast = ast[0].clone();
let (id, def_id, ty) = {
@ -683,7 +683,7 @@ fn test_inference(source: Vec<&str>, res: Vec<&str>) {
Arc::new(Resolver(internal_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;
for s in source {
let ast = parse_program(s).unwrap();
let ast = parse_program(s, Default::default()).unwrap();
let ast = ast[0].clone();
let (id, def_id, ty) = {

View File

@ -70,7 +70,10 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
type_vars.clone()
} else {
return Err("function cannot be used as a type".into());
return Err(format!(
"function cannot be used as a type (at {})",
expr.location
));
}
} else {
locked.get(&obj_id).unwrap().clone()
@ -79,8 +82,9 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
// check param number here
if !type_vars.is_empty() {
return Err(format!(
"expect {} type variable parameter but got 0",
type_vars.len()
"expect {} type variable parameter but got 0 (at {})",
type_vars.len(),
expr.location,
));
}
Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] })
@ -88,10 +92,13 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
Ok(TypeAnnotation::TypeVar(ty))
} else {
Err("not a type variable identifier".into())
Err(format!(
"not a type variable identifier at {}",
expr.location
))
}
} else {
Err("name cannot be parsed as a type annotation".into())
Err(format!("unknown type annotation at {}", expr.location))
}
};
@ -100,7 +107,7 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
if vec!["virtual".into(), "Generic".into(), "list".into(), "tuple".into()]
.contains(id)
{
return Err("keywords cannot be class name".into());
return Err(format!("keywords cannot be class name (at {})", expr.location));
}
let obj_id = resolver
.get_identifier_def(*id)
@ -126,13 +133,14 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
};
if type_vars.len() != params_ast.len() {
return Err(format!(
"expect {} type parameters but got {}",
"expect {} type parameters but got {} (at {})",
type_vars.len(),
params_ast.len()
params_ast.len(),
params_ast[0].location,
));
}
let result = params_ast
.into_iter()
.iter()
.map(|x| {
parse_ast_to_type_annotation_kinds(
resolver,
@ -154,9 +162,11 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
if no_type_var {
result
} else {
return Err("application of type vars to generic class \
is not currently supported"
.into());
return Err(format!(
"application of type vars to generic class \
is not currently supported (at {})",
params_ast[0].location
));
}
};
Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
@ -206,24 +216,27 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
matches!(&value.node, ast::ExprKind::Name { id, .. } if id == &"tuple".into())
} =>
{
if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
let type_annotations = elts
.iter()
.map(|e| {
parse_ast_to_type_annotation_kinds(
resolver,
top_level_defs,
unifier,
primitives,
e,
locked.clone(),
)
})
.collect::<Result<Vec<_>, _>>()?;
Ok(TypeAnnotation::Tuple(type_annotations))
} else {
Err("Expect multiple elements for tuple".into())
}
let tup_elts = {
if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
elts.as_slice()
} else {
std::slice::from_ref(slice.as_ref())
}
};
let type_annotations = tup_elts
.iter()
.map(|e| {
parse_ast_to_type_annotation_kinds(
resolver,
top_level_defs,
unifier,
primitives,
e,
locked.clone(),
)
})
.collect::<Result<Vec<_>, _>>()?;
Ok(TypeAnnotation::Tuple(type_annotations))
}
// custom class
@ -231,11 +244,11 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
if let ast::ExprKind::Name { id, .. } = &value.node {
class_name_handle(id, slice, unifier, locked)
} else {
Err("unsupported expression type for class name".into())
Err(format!("unsupported expression type for class name at {}", value.location))
}
}
_ => Err("unsupported expression for type annotation".into()),
_ => Err(format!("unsupported expression for type annotation at {}", expr.location)),
}
}
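The tuple-annotation rewrite above accepts both `tuple[T]` (a single subscript expression) and `tuple[T1, T2]` (a Tuple node) by normalizing the single-element case into a one-element slice with `std::slice::from_ref`, so both paths share one iteration. A self-contained illustration of that trick on a toy expression type (not the real nac3 AST):

```rust
// Toy expression type standing in for the real AST node.
#[derive(Debug)]
enum Expr {
    Name(&'static str),
    Tuple(Vec<Expr>),
}

// A Tuple yields its elements; any other expression is viewed as a slice of one.
fn tuple_elements(slice: &Expr) -> &[Expr] {
    if let Expr::Tuple(elts) = slice {
        elts.as_slice()
    } else {
        std::slice::from_ref(slice)
    }
}

fn main() {
    let single = Expr::Name("int32");                                      // tuple[int32]
    let multi = Expr::Tuple(vec![Expr::Name("int32"), Expr::Name("bool")]); // tuple[int32, bool]
    assert_eq!(tuple_elements(&single).len(), 1);
    assert_eq!(tuple_elements(&multi).len(), 2);
}
```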

View File

@ -680,11 +680,8 @@ impl<'a> Inferencer<'a> {
if let ExprKind::Constant { value: ast::Constant::Int(val), kind } =
&args[0].node
{
let int64: Result<i64, _> = val.try_into();
let custom;
if int64.is_ok() {
custom = Some(self.primitives.int64);
} else {
let custom = Some(self.primitives.int64);
if val.is_none() {
return Err("Integer out of bound".into());
}
return Ok(Located {
@ -777,12 +774,17 @@ impl<'a> Inferencer<'a> {
match constant {
ast::Constant::Bool(_) => Ok(self.primitives.bool),
ast::Constant::Int(val) => {
let int32: Result<i32, _> = val.try_into();
// int64 would be handled separately in functions
if int32.is_ok() {
Ok(self.primitives.int32)
} else {
Err("Integer out of bound".into())
match val {
Some(val) => {
let int32: Result<i32, _> = (*val).try_into();
// int64 is handled separately in functions
if int32.is_ok() {
Ok(self.primitives.int32)
} else {
Err("Integer out of bound".into())
}
},
None => Err("Integer out of bound".into())
}
}
ast::Constant::Float(_) => Ok(self.primitives.float),
@ -907,7 +909,11 @@ impl<'a> Inferencer<'a> {
}
ast::ExprKind::Constant { value: ast::Constant::Int(val), .. } => {
// the index is a constant, so value can be a sequence.
let ind: i32 = val.try_into().map_err(|_| "Index must be int32".to_string())?;
let ind: Option<i32> = match val {
Some(val) => (*val).try_into().ok(),
None => None,
};
let ind = ind.ok_or_else(|| "Index must be int32".to_string())?;
let map = once((ind, ty)).collect();
let seq = self.unifier.add_sequence(map);
self.constrain(value.custom.unwrap(), seq, &value.location)?;
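A condensed, hypothetical restatement of the literal-typing rule these inferencer hunks implement: a plain literal is int32 only if it fits in i32, an explicit int64(...) call accepts anything that fits in i64, and a literal that overflowed i64 during lexing (`None`) is always rejected.

```rust
#[derive(Debug, PartialEq)]
enum IntTy {
    Int32,
    Int64,
}

// Hypothetical helper condensing the two code paths above into one function.
fn type_of_int_literal(val: Option<i64>, in_int64_call: bool) -> Result<IntTy, String> {
    match val {
        None => Err("Integer out of bound".into()), // overflowed i64 while lexing
        Some(_) if in_int64_call => Ok(IntTy::Int64),
        Some(v) if i32::try_from(v).is_ok() => Ok(IntTy::Int32),
        Some(_) => Err("Integer out of bound".into()),
    }
}

fn main() {
    assert_eq!(type_of_int_literal(Some(1), false), Ok(IntTy::Int32));
    assert_eq!(type_of_int_literal(Some(1 << 40), true), Ok(IntTy::Int64));
    assert!(type_of_int_literal(Some(1 << 40), false).is_err());
    assert!(type_of_int_literal(None, true).is_err());
}
```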

View File

@ -461,7 +461,7 @@ fn test_basic(source: &str, mapping: HashMap<&str, &str>, virtuals: &[(&str, &st
defined_identifiers.insert("virtual".into());
let mut inferencer = env.get_inferencer();
inferencer.defined_identifiers = defined_identifiers.clone();
let statements = parse_program(source).unwrap();
let statements = parse_program(source, Default::default()).unwrap();
let statements = statements
.into_iter()
.map(|v| inferencer.fold_stmt(v))
@ -603,7 +603,7 @@ fn test_primitive_magic_methods(source: &str, mapping: HashMap<&str, &str>) {
defined_identifiers.insert("virtual".into());
let mut inferencer = env.get_inferencer();
inferencer.defined_identifiers = defined_identifiers.clone();
let statements = parse_program(source).unwrap();
let statements = parse_program(source, Default::default()).unwrap();
let statements = statements
.into_iter()
.map(|v| inferencer.fold_stmt(v))

View File

@ -14,8 +14,6 @@ lalrpop = "0.19.6"
nac3ast = { path = "../nac3ast" }
lalrpop-util = "0.19.6"
log = "0.4.1"
num-bigint = "0.4.0"
num-traits = "0.2"
unic-emoji-char = "0.9"
unic-ucd-ident = "0.9"
unicode_names2 = "0.4"

View File

@ -3,14 +3,12 @@
//! This means source code is translated into separate tokens.
pub use super::token::Tok;
use crate::ast::Location;
use crate::ast::{Location, FileName};
use crate::error::{LexicalError, LexicalErrorType};
use num_bigint::BigInt;
use num_traits::identities::Zero;
use num_traits::Num;
use std::char;
use std::cmp::Ordering;
use std::str::FromStr;
use std::num::IntErrorKind;
use unic_emoji_char::is_emoji_presentation;
use unic_ucd_ident::{is_xid_continue, is_xid_start};
@ -113,8 +111,8 @@ pub type Spanned = (Location, Tok, Location);
pub type LexResult = Result<Spanned, LexicalError>;
#[inline]
pub fn make_tokenizer(source: &str) -> impl Iterator<Item = LexResult> + '_ {
make_tokenizer_located(source, Location::new(0, 0))
pub fn make_tokenizer(source: &str, file: FileName) -> impl Iterator<Item = LexResult> + '_ {
make_tokenizer_located(source, Location::new(0, 0, file))
}
pub fn make_tokenizer_located(
@ -287,10 +285,18 @@ where
fn lex_number_radix(&mut self, start_pos: Location, radix: u32) -> LexResult {
let value_text = self.radix_run(radix);
let end_pos = self.get_pos();
let value = BigInt::from_str_radix(&value_text, radix).map_err(|e| LexicalError {
error: LexicalErrorType::OtherError(format!("{:?}", e)),
location: start_pos,
})?;
let value = match i64::from_str_radix(&value_text, radix) {
Ok(value) => Some(value),
Err(e) => {
match e.kind() {
IntErrorKind::PosOverflow | IntErrorKind::NegOverflow => None,
_ => return Err(LexicalError {
error: LexicalErrorType::OtherError(format!("{:?}", e)),
location: start_pos,
}),
}
}
};
Ok((start_pos, Tok::Int { value }, end_pos))
}
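A minimal, std-only sketch of the overflow policy adopted in `lex_number_radix` above (and in the decimal path below): a syntactically valid literal that does not fit in i64 becomes `Tok::Int { value: None }`, while any other parse failure is still a hard lexer error.

```rust
use std::num::IntErrorKind;

// Sketch only; the real lexer wraps this in LexicalError and Tok::Int.
fn lex_int_radix(text: &str, radix: u32) -> Result<Option<i64>, String> {
    match i64::from_str_radix(text, radix) {
        Ok(v) => Ok(Some(v)),
        Err(e) => match e.kind() {
            // Too large for i64: keep the token, but with no value.
            IntErrorKind::PosOverflow | IntErrorKind::NegOverflow => Ok(None),
            // Anything else (e.g. an invalid digit) is a real error.
            _ => Err(format!("{:?}", e)),
        },
    }
}

fn main() {
    assert_eq!(lex_int_radix("ff", 16), Ok(Some(255)));
    assert_eq!(lex_int_radix("9223372036854775808", 10), Ok(None)); // 2^63 overflows i64
    assert!(lex_int_radix("zz", 10).is_err());
}
```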
@ -353,8 +359,14 @@ where
Ok((start_pos, Tok::Complex { real: 0.0, imag }, end_pos))
} else {
let end_pos = self.get_pos();
let value = value_text.parse::<BigInt>().unwrap();
if start_is_zero && !value.is_zero() {
// assumption: value_text contains a valid integer.
// parse should only fail because of overflow.
let value = value_text.parse::<i64>().ok();
let nonzero = match value {
Some(value) => value != 0i64,
None => true
};
if start_is_zero && nonzero {
return Err(LexicalError {
error: LexicalErrorType::OtherError("Invalid Token".to_owned()),
location: self.get_pos(),
@ -1320,15 +1332,15 @@ where
#[cfg(test)]
mod tests {
use super::{make_tokenizer, NewlineHandler, Tok};
use num_bigint::BigInt;
const WINDOWS_EOL: &str = "\r\n";
const MAC_EOL: &str = "\r";
const UNIX_EOL: &str = "\n";
pub fn lex_source(source: &str) -> Vec<Tok> {
let lexer = make_tokenizer(source);
let lexer = make_tokenizer(source, Default::default());
lexer.map(|x| x.unwrap().1).collect()
}
@ -1449,16 +1461,16 @@ class Foo(A, B):
tokens,
vec![
Tok::Int {
value: BigInt::from(47),
value: Some(47i64),
},
Tok::Int {
value: BigInt::from(13),
value: Some(13i64),
},
Tok::Int {
value: BigInt::from(0),
value: Some(0i64),
},
Tok::Int {
value: BigInt::from(123),
value: Some(123i64),
},
Tok::Float { value: 0.2 },
Tok::Complex {
@ -1481,7 +1493,7 @@ class Foo(A, B):
fn $name() {
let source = format!(r"99232 # {}", $eol);
let tokens = lex_source(&source);
assert_eq!(tokens, vec![Tok::Int { value: BigInt::from(99232) }, Tok::Newline]);
assert_eq!(tokens, vec![Tok::Int { value: Some(99232i64) }, Tok::Newline]);
}
)*
}
@ -1504,9 +1516,9 @@ class Foo(A, B):
assert_eq!(
tokens,
vec![
Tok::Int { value: BigInt::from(123) },
Tok::Int { value: Some(123i64) },
Tok::Newline,
Tok::Int { value: BigInt::from(456) },
Tok::Int { value: Some(456i64) },
Tok::Newline,
]
)
@ -1533,15 +1545,15 @@ class Foo(A, B):
},
Tok::Equal,
Tok::Int {
value: BigInt::from(99)
value: Some(99i64)
},
Tok::Plus,
Tok::Int {
value: BigInt::from(2)
value: Some(2i64)
},
Tok::Minus,
Tok::Int {
value: BigInt::from(0)
value: Some(0i64)
},
Tok::Newline,
]
@ -1568,7 +1580,7 @@ class Foo(A, B):
Tok::Newline,
Tok::Indent,
Tok::Return,
Tok::Int { value: BigInt::from(99) },
Tok::Int { value: Some(99i64) },
Tok::Newline,
Tok::Dedent,
]
@ -1611,7 +1623,7 @@ class Foo(A, B):
Tok::Newline,
Tok::Indent,
Tok::Return,
Tok::Int { value: BigInt::from(99) },
Tok::Int { value: Some(99i64) },
Tok::Newline,
Tok::Dedent,
Tok::Dedent,
@ -1649,7 +1661,7 @@ class Foo(A, B):
Tok::Newline,
Tok::Indent,
Tok::Return,
Tok::Int { value: BigInt::from(99) },
Tok::Int { value: Some(99i64) },
Tok::Newline,
Tok::Dedent,
Tok::Dedent,
@ -1687,9 +1699,9 @@ class Foo(A, B):
},
Tok::Equal,
Tok::Lsqb,
Tok::Int { value: BigInt::from(1) },
Tok::Int { value: Some(1i64) },
Tok::Comma,
Tok::Int { value: BigInt::from(2) },
Tok::Int { value: Some(2i64) },
Tok::Rsqb,
Tok::Newline,
]

View File

@ -7,7 +7,7 @@
use std::iter;
use crate::ast;
use crate::ast::{self, FileName};
use crate::error::ParseError;
use crate::lexer;
pub use crate::mode::Mode;
@ -20,8 +20,8 @@ use crate::python;
*/
/// Parse a full python program, containing usually multiple lines.
pub fn parse_program(source: &str) -> Result<ast::Suite, ParseError> {
parse(source, Mode::Module).map(|top| match top {
pub fn parse_program(source: &str, file: FileName) -> Result<ast::Suite, ParseError> {
parse(source, Mode::Module, file).map(|top| match top {
ast::Mod::Module { body, .. } => body,
_ => unreachable!(),
})
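With the extra `FileName` argument, callers now name the file whose locations should appear in diagnostics; passing `Default::default()` keeps the old behavior (file name "unknown", as seen in the updated snapshots). A small usage sketch, assuming the nac3parser crate is on the dependency path:

```rust
use nac3parser::parser::parse_program;

fn main() {
    let source = "x = 1 + 2";
    // The file name is carried into every Location, so later error messages
    // can report "(at demo.py:<row>:<col>)".
    let ast = parse_program(source, "demo.py".to_string().into()).unwrap();
    println!("{:#?}", ast);
}
```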
@ -31,18 +31,17 @@ pub fn parse_program(source: &str) -> Result<ast::Suite, ParseError> {
///
/// # Example
/// ```
/// extern crate num_bigint;
/// use nac3parser::{parser, ast};
/// let expr = parser::parse_expression("1 + 2").unwrap();
///
/// assert_eq!(
/// expr,
/// ast::Expr {
/// location: ast::Location::new(1, 3),
/// location: ast::Location::new(1, 3, Default::default()),
/// custom: (),
/// node: ast::ExprKind::BinOp {
/// left: Box::new(ast::Expr {
/// location: ast::Location::new(1, 1),
/// location: ast::Location::new(1, 1, Default::default()),
/// custom: (),
/// node: ast::ExprKind::Constant {
/// value: ast::Constant::Int(1.into()),
@ -51,7 +50,7 @@ pub fn parse_program(source: &str) -> Result<ast::Suite, ParseError> {
/// }),
/// op: ast::Operator::Add,
/// right: Box::new(ast::Expr {
/// location: ast::Location::new(1, 5),
/// location: ast::Location::new(1, 5, Default::default()),
/// custom: (),
/// node: ast::ExprKind::Constant {
/// value: ast::Constant::Int(2.into()),
@ -64,15 +63,15 @@ pub fn parse_program(source: &str) -> Result<ast::Suite, ParseError> {
///
/// ```
pub fn parse_expression(source: &str) -> Result<ast::Expr, ParseError> {
parse(source, Mode::Expression).map(|top| match top {
parse(source, Mode::Expression, Default::default()).map(|top| match top {
ast::Mod::Expression { body } => *body,
_ => unreachable!(),
})
}
// Parse a given source code
pub fn parse(source: &str, mode: Mode) -> Result<ast::Mod, ParseError> {
let lxr = lexer::make_tokenizer(source);
pub fn parse(source: &str, mode: Mode, file: FileName) -> Result<ast::Mod, ParseError> {
let lxr = lexer::make_tokenizer(source, file);
let marker_token = (Default::default(), mode.to_marker(), Default::default());
let tokenizer = iter::once(Ok(marker_token)).chain(lxr);
@ -87,42 +86,42 @@ mod tests {
#[test]
fn test_parse_empty() {
let parse_ast = parse_program("").unwrap();
let parse_ast = parse_program("", Default::default()).unwrap();
insta::assert_debug_snapshot!(parse_ast);
}
#[test]
fn test_parse_print_hello() {
let source = String::from("print('Hello world')");
let parse_ast = parse_program(&source).unwrap();
let parse_ast = parse_program(&source, Default::default()).unwrap();
insta::assert_debug_snapshot!(parse_ast);
}
#[test]
fn test_parse_print_2() {
let source = String::from("print('Hello world', 2)");
let parse_ast = parse_program(&source).unwrap();
let parse_ast = parse_program(&source, Default::default()).unwrap();
insta::assert_debug_snapshot!(parse_ast);
}
#[test]
fn test_parse_kwargs() {
let source = String::from("my_func('positional', keyword=2)");
let parse_ast = parse_program(&source).unwrap();
let parse_ast = parse_program(&source, Default::default()).unwrap();
insta::assert_debug_snapshot!(parse_ast);
}
#[test]
fn test_parse_if_elif_else() {
let source = String::from("if 1: 10\nelif 2: 20\nelse: 30");
let parse_ast = parse_program(&source).unwrap();
let parse_ast = parse_program(&source, Default::default()).unwrap();
insta::assert_debug_snapshot!(parse_ast);
}
#[test]
fn test_parse_lambda() {
let source = "lambda x, y: x * y"; // lambda(x, y): x * y
let parse_ast = parse_program(&source).unwrap();
let parse_ast = parse_program(&source, Default::default()).unwrap();
insta::assert_debug_snapshot!(parse_ast);
}
@ -130,7 +129,7 @@ mod tests {
fn test_parse_tuples() {
let source = "a, b = 4, 5";
insta::assert_debug_snapshot!(parse_program(&source).unwrap());
insta::assert_debug_snapshot!(parse_program(&source, Default::default()).unwrap());
}
#[test]
@ -141,7 +140,7 @@ class Foo(A, B):
pass
def method_with_default(self, arg='default'):
pass";
insta::assert_debug_snapshot!(parse_program(&source).unwrap());
insta::assert_debug_snapshot!(parse_program(&source, Default::default()).unwrap());
}
#[test]
@ -184,7 +183,7 @@ while i < 2: # nac3: 4
# nac3: if1
if 1: # nac3: if2
3";
insta::assert_debug_snapshot!(parse_program(&source).unwrap());
insta::assert_debug_snapshot!(parse_program(&source, Default::default()).unwrap());
}
#[test]
@ -197,7 +196,7 @@ while test: # nac3: while3
# nac3: simple assign0
a = 3 # nac3: simple assign1
";
insta::assert_debug_snapshot!(parse_program(&source).unwrap());
insta::assert_debug_snapshot!(parse_program(&source, Default::default()).unwrap());
}
#[test]
@ -216,7 +215,7 @@ if a: # nac3: small2
for i in a: # nac3: for1
pass
";
insta::assert_debug_snapshot!(parse_program(&source).unwrap());
insta::assert_debug_snapshot!(parse_program(&source, Default::default()).unwrap());
}
#[test]
@ -225,6 +224,6 @@ for i in a: # nac3: for1
if a: # nac3: something
a = 3
";
assert!(parse_program(&source).is_err());
assert!(parse_program(&source, Default::default()).is_err());
}
}

View File

@ -14,7 +14,6 @@ use crate::lexer;
use crate::config_comment_helper::*;
use lalrpop_util::ParseError;
use num_bigint::BigInt;
grammar;
@ -920,7 +919,7 @@ Factor: ast::Expr = {
match (&op, &e.node) {
(ast::Unaryop::USub, ast::ExprKind::Constant { value: Constant::Int(val), kind }) => {
ast::ExprKind::Constant {
value: Constant::Int(-val),
value: if let Some(val) = val { Constant::Int(Some(-val)) } else { Constant::Int(None) },
kind: kind.clone()
}
}
@ -1362,7 +1361,7 @@ extern {
"True" => lexer::Tok::True,
"False" => lexer::Tok::False,
"None" => lexer::Tok::None,
int => lexer::Tok::Int { value: <BigInt> },
int => lexer::Tok::Int { value: <Option<i64>> },
float => lexer::Tok::Float { value: <f64> },
complex => lexer::Tok::Complex { real: <f64>, imag: <f64> },
string => lexer::Tok::String { value: <String>, is_fstring: <bool> },

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 327
expression: parse_ast
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: JoinedStr {
@ -14,6 +19,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -27,6 +35,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -40,6 +51,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -47,6 +61,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 335
expression: parse_ast
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: JoinedStr {
@ -14,6 +19,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -27,6 +35,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -40,6 +51,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -53,6 +67,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -60,6 +77,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -75,6 +95,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -88,6 +111,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -101,6 +127,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -114,6 +143,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -121,6 +153,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 343
expression: parse_ast
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: JoinedStr {
@ -14,6 +19,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -27,6 +35,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -40,6 +51,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -47,6 +61,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -60,6 +77,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 319
expression: "parse_fstring(\"\").unwrap()"
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 298
expression: parse_ast
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: JoinedStr {
@ -14,6 +19,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -21,6 +29,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -36,6 +47,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -43,6 +57,9 @@ Located {
location: Location {
row: 1,
column: 3,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -58,6 +75,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {

View File

@ -1,5 +1,6 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 382
expression: parse_ast
---
@ -7,6 +8,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -14,6 +18,9 @@ Located {
location: Location {
row: 1,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Compare {
@ -21,11 +28,16 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
42,
Some(
42,
),
),
kind: None,
},
@ -38,11 +50,16 @@ Located {
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
42,
Some(
42,
),
),
kind: None,
},

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 306
expression: parse_ast
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -13,6 +18,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -26,6 +34,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -33,6 +44,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {

View File

@ -1,5 +1,6 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 375
expression: parse_ast
---
@ -7,6 +8,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -14,6 +18,9 @@ Located {
location: Location {
row: 1,
column: 4,
file: FileName(
"unknown",
),
},
custom: (),
node: Compare {
@ -21,11 +28,16 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
1,
Some(
1,
),
),
kind: None,
},
@ -38,11 +50,16 @@ Located {
location: Location {
row: 1,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
2,
Some(
2,
),
),
kind: None,
},

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 314
expression: parse_ast
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -13,6 +18,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -26,6 +34,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 389
expression: parse_ast
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: JoinedStr {
@ -14,6 +19,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -27,6 +35,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -40,6 +51,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -47,6 +61,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 396
expression: parse_ast
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: JoinedStr {
@ -14,6 +19,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -27,6 +35,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -40,6 +51,9 @@ Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -47,6 +61,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {

View File

@ -1,11 +1,16 @@
---
source: parser/src/fstring.rs
source: nac3parser/src/fstring.rs
assertion_line: 403
expression: parse_ast
---
Located {
location: Location {
row: 0,
column: 0,
file: FileName(
"unknown",
),
},
custom: (),
node: FormattedValue {
@ -13,6 +18,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Yield {

View File

@ -1,6 +1,7 @@
---
source: nac3parser/src/parser.rs
expression: parse_program(&source).unwrap()
assertion_line: 218
expression: "parse_program(&source, Default::default()).unwrap()"
---
[
@ -8,6 +9,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: If {
@ -15,6 +19,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 4,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -27,6 +34,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -34,6 +44,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -55,6 +68,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 2,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: If {
@ -62,6 +78,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 2,
column: 4,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -74,6 +93,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 2,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -81,6 +103,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 2,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -102,6 +127,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 3,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: If {
@ -109,6 +137,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 3,
column: 4,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -121,6 +152,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -128,6 +162,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -142,6 +179,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -149,6 +189,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 10,
file: FileName(
"unknown",
),
},
custom: (),
node: BinOp {
@ -156,6 +199,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -168,11 +214,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 12,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
3,
Some(
3,
),
),
kind: None,
},
@ -195,6 +246,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Assign {
@ -203,6 +257,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -215,11 +272,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
3,
Some(
3,
),
),
kind: None,
},
@ -232,6 +294,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -239,6 +304,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 10,
file: FileName(
"unknown",
),
},
custom: (),
node: BinOp {
@ -246,6 +314,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -258,11 +329,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 12,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
3,
Some(
3,
),
),
kind: None,
},
@ -276,6 +352,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 15,
file: FileName(
"unknown",
),
},
custom: (),
node: Assign {
@ -284,6 +363,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 15,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -296,6 +378,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 19,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -313,6 +398,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 8,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Assign {
@ -321,6 +409,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 8,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -333,11 +424,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 8,
column: 6,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
3,
Some(
3,
),
),
kind: None,
},
@ -354,6 +450,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 9,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: If {
@ -361,6 +460,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 9,
column: 4,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -373,6 +475,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 10,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -380,6 +485,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 10,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -401,6 +509,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 11,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: For {
@ -408,6 +519,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 11,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -419,6 +533,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 11,
column: 10,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -431,6 +548,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 12,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Pass {

View File

@ -1,6 +1,7 @@
---
source: nac3parser/src/parser.rs
expression: parse_program(&source).unwrap()
assertion_line: 186
expression: "parse_program(&source, Default::default()).unwrap()"
---
[
@ -8,6 +9,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: AnnAssign {
@ -15,6 +19,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -26,6 +33,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 4,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -44,6 +54,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 2,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: For {
@ -51,6 +64,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 3,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -62,6 +78,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 3,
column: 11,
file: FileName(
"unknown",
),
},
custom: (),
node: Tuple {
@ -70,11 +89,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 3,
column: 11,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
1,
Some(
1,
),
),
kind: None,
},
@ -83,6 +107,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 3,
column: 15,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -101,6 +128,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: AnnAssign {
@ -108,6 +138,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -119,6 +152,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -144,6 +180,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: While {
@ -151,6 +190,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 7,
column: 9,
file: FileName(
"unknown",
),
},
custom: (),
node: Compare {
@ -158,6 +200,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 7,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -173,11 +218,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 7,
column: 11,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
2,
Some(
2,
),
),
kind: None,
},
@ -190,6 +240,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 9,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Pass {
@ -202,6 +255,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 12,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -209,6 +265,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 12,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: BinOp {
@ -216,11 +275,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 12,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
1,
Some(
1,
),
),
kind: None,
},
@ -230,11 +294,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 12,
column: 9,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
2,
Some(
2,
),
),
kind: None,
},
@ -252,6 +321,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 13,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: If {
@ -259,11 +331,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 15,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
1,
Some(
1,
),
),
kind: None,
},
@ -273,6 +350,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 16,
column: 9,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -280,11 +360,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 16,
column: 9,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
3,
Some(
3,
),
),
kind: None,
},

View File

@ -1,6 +1,7 @@
---
source: nac3parser/src/parser.rs
expression: parse_program(&source).unwrap()
assertion_line: 143
expression: "parse_program(&source, Default::default()).unwrap()"
---
[
@ -8,6 +9,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: ClassDef {
@ -17,6 +21,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 11,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -28,6 +35,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 14,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -42,6 +52,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 2,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: FunctionDef {
@ -53,6 +66,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 2,
column: 15,
file: FileName(
"unknown",
),
},
custom: (),
node: ArgData {
@ -73,6 +89,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 3,
column: 3,
file: FileName(
"unknown",
),
},
custom: (),
node: Pass {
@ -90,6 +109,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: FunctionDef {
@ -101,6 +123,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 26,
file: FileName(
"unknown",
),
},
custom: (),
node: ArgData {
@ -113,6 +138,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 32,
file: FileName(
"unknown",
),
},
custom: (),
node: ArgData {
@ -131,6 +159,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 37,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -147,6 +178,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 5,
column: 3,
file: FileName(
"unknown",
),
},
custom: (),
node: Pass {

View File

@ -1,11 +1,16 @@
---
source: parser/src/parser.rs
source: nac3parser/src/parser.rs
assertion_line: 150
expression: parse_ast
---
Located {
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: DictComp {
@ -13,6 +18,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -24,6 +32,9 @@ Located {
location: Location {
row: 1,
column: 6,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -37,6 +48,9 @@ Located {
location: Location {
row: 1,
column: 13,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -48,6 +62,9 @@ Located {
location: Location {
row: 1,
column: 18,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {

View File

@ -1,5 +1,6 @@
---
source: parser/src/parser.rs
source: nac3parser/src/parser.rs
assertion_line: 164
expression: parse_ast
---
@ -7,6 +8,9 @@ Located {
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: ListComp {
@ -14,6 +18,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -27,6 +34,9 @@ Located {
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Tuple {
@ -35,6 +45,9 @@ Located {
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -46,6 +59,9 @@ Located {
location: Location {
row: 1,
column: 11,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -61,6 +77,9 @@ Located {
location: Location {
row: 1,
column: 17,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -76,6 +95,9 @@ Located {
location: Location {
row: 1,
column: 23,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -87,6 +109,9 @@ Located {
location: Location {
row: 1,
column: 28,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -99,6 +124,9 @@ Located {
location: Location {
row: 1,
column: 35,
file: FileName(
"unknown",
),
},
custom: (),
node: Compare {
@ -106,6 +134,9 @@ Located {
location: Location {
row: 1,
column: 33,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -121,11 +152,16 @@ Located {
location: Location {
row: 1,
column: 37,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
5,
Some(
5,
),
),
kind: None,
},
@ -137,6 +173,9 @@ Located {
location: Location {
row: 1,
column: 44,
file: FileName(
"unknown",
),
},
custom: (),
node: Compare {
@ -144,6 +183,9 @@ Located {
location: Location {
row: 1,
column: 42,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -159,11 +201,16 @@ Located {
location: Location {
row: 1,
column: 46,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
10,
Some(
10,
),
),
kind: None,
},

View File

@ -1,5 +1,6 @@
---
source: nac3parser/src/parser.rs
assertion_line: 118
expression: parse_ast
---
@ -8,6 +9,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: If {
@ -15,11 +19,16 @@ expression: parse_ast
location: Location {
row: 1,
column: 4,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
1,
Some(
1,
),
),
kind: None,
},
@ -29,6 +38,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -36,11 +48,16 @@ expression: parse_ast
location: Location {
row: 1,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
10,
Some(
10,
),
),
kind: None,
},
@ -54,6 +71,9 @@ expression: parse_ast
location: Location {
row: 2,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: If {
@ -61,11 +81,16 @@ expression: parse_ast
location: Location {
row: 2,
column: 6,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
2,
Some(
2,
),
),
kind: None,
},
@ -75,6 +100,9 @@ expression: parse_ast
location: Location {
row: 2,
column: 9,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -82,11 +110,16 @@ expression: parse_ast
location: Location {
row: 2,
column: 9,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
20,
Some(
20,
),
),
kind: None,
},
@ -100,6 +133,9 @@ expression: parse_ast
location: Location {
row: 3,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -107,11 +143,16 @@ expression: parse_ast
location: Location {
row: 3,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
30,
Some(
30,
),
),
kind: None,
},

View File

@ -1,5 +1,6 @@
---
source: nac3parser/src/parser.rs
assertion_line: 111
expression: parse_ast
---
@ -8,6 +9,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -15,6 +19,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Call {
@ -22,6 +29,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -34,6 +44,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 10,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -49,6 +62,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 23,
file: FileName(
"unknown",
),
},
custom: (),
node: KeywordData {
@ -59,11 +75,16 @@ expression: parse_ast
location: Location {
row: 1,
column: 31,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
2,
Some(
2,
),
),
kind: None,
},

View File

@ -1,5 +1,6 @@
---
source: nac3parser/src/parser.rs
assertion_line: 125
expression: parse_ast
---
@ -8,6 +9,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -15,6 +19,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Lambda {
@ -25,6 +32,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: ArgData {
@ -37,6 +47,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 11,
file: FileName(
"unknown",
),
},
custom: (),
node: ArgData {
@ -56,6 +69,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 16,
file: FileName(
"unknown",
),
},
custom: (),
node: BinOp {
@ -63,6 +79,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 14,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -75,6 +94,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 18,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {

View File

@ -1,11 +1,16 @@
---
source: parser/src/parser.rs
source: nac3parser/src/parser.rs
assertion_line: 157
expression: parse_ast
---
Located {
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: ListComp {
@ -13,6 +18,9 @@ Located {
location: Location {
row: 1,
column: 2,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -26,6 +34,9 @@ Located {
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -37,6 +48,9 @@ Located {
location: Location {
row: 1,
column: 13,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {

View File

@ -1,5 +1,6 @@
---
source: nac3parser/src/parser.rs
assertion_line: 104
expression: parse_ast
---
@ -8,6 +9,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -15,6 +19,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 6,
file: FileName(
"unknown",
),
},
custom: (),
node: Call {
@ -22,6 +29,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -34,6 +44,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
@ -47,11 +60,16 @@ expression: parse_ast
location: Location {
row: 1,
column: 22,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
2,
Some(
2,
),
),
kind: None,
},

View File

@ -1,5 +1,6 @@
---
source: nac3parser/src/parser.rs
assertion_line: 97
expression: parse_ast
---
@ -8,6 +9,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Expr {
@ -15,6 +19,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 6,
file: FileName(
"unknown",
),
},
custom: (),
node: Call {
@ -22,6 +29,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -34,6 +44,9 @@ expression: parse_ast
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {

View File

@ -1,6 +1,7 @@
---
source: nac3parser/src/parser.rs
expression: parse_program(&source).unwrap()
assertion_line: 132
expression: "parse_program(&source, Default::default()).unwrap()"
---
[
@ -8,6 +9,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Assign {
@ -16,6 +20,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Tuple {
@ -24,6 +31,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -35,6 +45,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 4,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -51,6 +64,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Tuple {
@ -59,11 +75,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 8,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
4,
Some(
4,
),
),
kind: None,
},
@ -72,11 +93,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 11,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
5,
Some(
5,
),
),
kind: None,
},

View File

@ -1,6 +1,7 @@
---
source: nac3parser/src/parser.rs
expression: parse_program(&source).unwrap()
assertion_line: 199
expression: "parse_program(&source, Default::default()).unwrap()"
---
[
@ -8,6 +9,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 1,
column: 1,
file: FileName(
"unknown",
),
},
custom: (),
node: While {
@ -15,6 +19,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 4,
column: 7,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -27,6 +34,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 6,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Assign {
@ -35,6 +45,9 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 6,
column: 5,
file: FileName(
"unknown",
),
},
custom: (),
node: Name {
@ -47,11 +60,16 @@ expression: parse_program(&source).unwrap()
location: Location {
row: 6,
column: 9,
file: FileName(
"unknown",
),
},
custom: (),
node: Constant {
value: Int(
3,
Some(
3,
),
),
kind: None,
},

View File

@ -1,6 +1,5 @@
//! Different token definitions.
//! Loosely based on token.h from CPython source:
use num_bigint::BigInt;
use std::fmt::{self, Write};
use crate::ast;
@ -8,7 +7,7 @@ use crate::ast;
#[derive(Clone, Debug, PartialEq)]
pub enum Tok {
Name { name: ast::StrRef },
Int { value: BigInt },
Int { value: Option<i64> },
Float { value: f64 },
Complex { real: f64, imag: f64 },
String { value: String, is_fstring: bool },
@ -113,7 +112,7 @@ impl fmt::Display for Tok {
use Tok::*;
match self {
Name { name } => write!(f, "'{}'", ast::get_str_from_ref(&ast::get_str_ref_lock(), *name)),
Int { value } => write!(f, "'{}'", value),
Int { value } => if let Some(value) = value { write!(f, "'{}'", value) } else { write!(f, "'#OFL#'") },
Float { value } => write!(f, "'{}'", value),
Complex { real, imag } => write!(f, "{}j{}", real, imag),
String { value, is_fstring } => {
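Because an overflowed literal carries no value to print, the Display impl above falls back to a placeholder. A self-contained sketch of just that arm, using a toy stand-in for `Tok::Int`:

```rust
use std::fmt;

// Toy stand-in for Tok::Int so the Display arm can be shown in isolation.
struct IntTok {
    value: Option<i64>,
}

impl fmt::Display for IntTok {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.value {
            Some(value) => write!(f, "'{}'", value),
            None => write!(f, "'#OFL#'"), // literal overflowed i64 during lexing
        }
    }
}

fn main() {
    assert_eq!(IntTok { value: Some(99) }.to_string(), "'99'");
    assert_eq!(IntTok { value: None }.to_string(), "'#OFL#'");
}
```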

View File

@ -10,7 +10,6 @@ nac3parser = { path = "../nac3parser" }
nac3core = { path = "../nac3core" }
[dependencies.inkwell]
git = "https://github.com/TheDan64/inkwell"
branch = "master"
version = "0.1.0-beta.4"
default-features = false
features = ["llvm12-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
features = ["llvm13-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]

View File

@ -35,7 +35,7 @@ fn main() {
Target::initialize_all(&InitializationConfig::default());
let program = match fs::read_to_string(demo_name + ".py") {
let program = match fs::read_to_string(demo_name.clone() + ".py") {
Ok(program) => program,
Err(err) => {
println!("Cannot open input file: {}", err);
@ -64,7 +64,7 @@ fn main() {
setup_time.duration_since(start).unwrap().as_millis()
);
let parser_result = parser::parse_program(&program).unwrap();
let parser_result = parser::parse_program(&program, format!("{}.py", demo_name).into()).unwrap();
let parse_time = SystemTime::now();
println!(
"parse time: {}ms",