Merge remote-tracking branch 'origin/master' into err_msg_improvement

commit fc2c86da5d

README.md (47 lines changed)
@@ -1,25 +1,30 @@
-# NAC3 compiler
+# NAC3

NAC3 is a major, backward-incompatible rewrite of the compiler for the [ARTIQ](https://m-labs.hk/artiq) physics experiment control and data acquisition system. It features greatly improved compilation speeds, a much better type system, and more predictable and transparent operation.

NAC3 has a modular design and its applicability reaches beyond ARTIQ. The ``nac3core`` module does not contain anything specific to ARTIQ, and can be used in any project that requires compiling Python to machine code.

**WARNING: NAC3 is currently experimental software and several important features are not implemented yet.**

## Packaging

NAC3 is packaged using the [Nix](https://nixos.org) Flakes system. Install Nix 2.4+ and enable flakes by adding ``experimental-features = nix-command flakes`` to ``nix.conf`` (e.g. ``~/.config/nix/nix.conf``).

## Try NAC3

After setting up Nix as above, use ``nix shell github:m-labs/artiq/nac3`` to get a shell with the NAC3 version of ARTIQ. See the ``examples`` directory in ARTIQ (``nac3`` Git branch) for some samples of NAC3 kernel code.

## For developers

This repository contains:
-- nac3ast: Python abstract syntax tree definition (based on RustPython).
-- nac3parser: Python parser (based on RustPython).
-- nac3core: Core compiler library, containing type-checking and code
-  generation.
-- nac3standalone: Standalone compiler tool (core language only).
-- nac3artiq: Integration with ARTIQ and implementation of ARTIQ-specific
-  extensions to the core language.
-- runkernel: Simple program that runs compiled ARTIQ kernels on the host
-  and displays RTIO operations. Useful for testing without hardware.
+- ``nac3ast``: Python abstract syntax tree definition (based on RustPython).
+- ``nac3parser``: Python parser (based on RustPython).
+- ``nac3core``: Core compiler library, containing type-checking and code generation.
+- ``nac3standalone``: Standalone compiler tool (core language only).
+- ``nac3artiq``: Integration with ARTIQ and implementation of ARTIQ-specific extensions to the core language.
+- ``runkernel``: Simple program that runs compiled ARTIQ kernels on the host and displays RTIO operations. Useful for testing without hardware.

Use ``nix develop`` in this repository to enter a development shell.
If you are using a different shell than bash you can use e.g. ``nix develop --command fish``.

-The core compiler knows nothing about symbol resolution, host variables
-etc. nac3artiq and nac3standalone provide (implement) the
-symbol resolver to the core compiler for resolving the type and value for
-unknown symbols. The core compiler only type checks classes and functions
-requested by nac3artiq/nac3standalone (the API should allow the
-caller to specify which methods should be compiled). After type checking, the
-compiler analyses the set of functions/classes that are used and performs
-code generation.
-
-value could be integer values, boolean values, bytes (for memcpy), function ID
-(full name + concrete type)
+Build NAC3 with ``cargo build --release``. See the demonstrations in ``nac3artiq`` and ``nac3standalone``.
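The developer notes removed above still describe the overall split: the core compiler knows nothing about host symbols, and embedders such as nac3artiq and nac3standalone hand it a symbol resolver that answers type and value queries (integer or boolean values, bytes for memcpy, or a function ID made of a full name plus a concrete type). The sketch below only illustrates that idea; the trait, enum and method names are hypothetical, not the actual ``nac3core::symbol_resolver`` API.

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for the kinds of values a resolver can report back:
// integers, booleans, raw bytes (for memcpy), or a function identified by its
// full name plus a concrete type. Names are illustrative only.
#[derive(Clone, Debug)]
enum HostValue {
    Int64(i64),
    Bool(bool),
    Bytes(Vec<u8>),
    Function { full_name: String, concrete_type: String },
}

// A minimal resolver interface: the core compiler asks about a name it cannot
// see, and the embedder answers with its type and/or value.
trait HostSymbolResolver {
    fn resolve_type(&self, name: &str) -> Option<String>;
    fn resolve_value(&self, name: &str) -> Option<HostValue>;
}

// An embedder (in the same spirit as nac3artiq or nac3standalone) backs the
// trait with whatever it knows about the host environment.
struct MapResolver {
    types: HashMap<String, String>,
    values: HashMap<String, HostValue>,
}

impl HostSymbolResolver for MapResolver {
    fn resolve_type(&self, name: &str) -> Option<String> {
        self.types.get(name).cloned()
    }
    fn resolve_value(&self, name: &str) -> Option<HostValue> {
        self.values.get(name).cloned()
    }
}

fn main() {
    let mut types = HashMap::new();
    let mut values = HashMap::new();
    types.insert("threshold".to_string(), "int64".to_string());
    values.insert("threshold".to_string(), HostValue::Int64(42));
    let resolver = MapResolver { types, values };
    // During type checking and code generation the core compiler would issue
    // queries like these for every unknown symbol it encounters.
    println!("{:?}", resolver.resolve_type("threshold"));
    println!("{:?}", resolver.resolve_value("threshold"));
}
```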
flake.lock
@@ -2,16 +2,16 @@
  "nodes": {
    "nixpkgs": {
      "locked": {
-       "lastModified": 1637636156,
-       "narHash": "sha256-E2ym4Vcpqu9JYoQDXJZR48gVD+LPPbaCoYveIk7Xu3Y=",
+       "lastModified": 1638887115,
+       "narHash": "sha256-emjtIeqyJ84Eb3X7APJruTrwcfnHQKs55XGljj62prs=",
        "owner": "NixOS",
        "repo": "nixpkgs",
-       "rev": "b026e1cf87a108dd06fe521f224fdc72fd0b013d",
+       "rev": "1bd4bbd49bef217a3d1adea43498270d6e779d65",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
-       "ref": "release-21.11",
+       "ref": "nixos-21.11",
        "repo": "nixpkgs",
        "type": "github"
      }
flake.nix (111 lines changed)
@@ -1,29 +1,62 @@
{
description = "The third-generation ARTIQ compiler";

inputs.nixpkgs.url = github:NixOS/nixpkgs/release-21.11;
inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-21.11;

outputs = { self, nixpkgs }:
let
# We can't use overlays because llvm dependencies are handled internally in llvmPackages_xx
pkgs-orig = import nixpkgs { system = "x86_64-linux"; };
nixpkgs-patched = pkgs-orig.applyPatches {
name = "nixpkgs";
src = nixpkgs;
patches = [ ./llvm-future-riscv-abi.diff ./llvm-restrict-targets.diff ];
pkgs = import nixpkgs { system = "x86_64-linux"; };
pkgs-mingw = import nixpkgs {
system = "x86_64-linux";
crossSystem = { config = "x86_64-w64-mingw32"; libc = "msvcrt"; };
# work around https://github.com/NixOS/nixpkgs/issues/149593
overlays = [
(self: super: {
openssh = super.openssh.overrideAttrs(oa: { doCheck = false; });
})
];
};
cargoSha256 = "sha256-otKLhr58HYMjVXAof6AdObNpggPnvK6qOl7I+4LWIP8=";
msys2-python-tar = pkgs.fetchurl {
url = "https://mirror.msys2.org/mingw/mingw64/mingw-w64-x86_64-python-3.9.7-4-any.pkg.tar.zst";
sha256 = "0iwlgbk4b457yn9djwqswid55xhyyi35qymz1lfh42xwdpxdm47c";
};
msys2-python = pkgs.stdenvNoCC.mkDerivation {
name = "msys2-python";
src = msys2-python-tar;
buildInputs = [ pkgs.gnutar pkgs.zstd ];
phases = [ "installPhase" ];
installPhase =
''
mkdir $out
tar xf $src -C $out
'';
};
pyo3-mingw-config = pkgs.writeTextFile {
name = "pyo3-mingw-config";
text =
''
implementation=CPython
version=3.9
shared=true
abi3=false
lib_name=python3.9
lib_dir=${msys2-python}/mingw64/lib
pointer_width=64
build_flags=WITH_THREAD
suppress_build_script_link_lines=false
'';
};
pkgs = import nixpkgs-patched { system = "x86_64-linux"; };
in rec {
inherit nixpkgs-patched;

packages.x86_64-linux = {
packages.x86_64-linux = rec {
llvm-nac3 = pkgs.callPackage "${self}/llvm" {};
nac3artiq = pkgs.python3Packages.toPythonModule (
pkgs.rustPlatform.buildRustPackage {
name = "nac3artiq";
src = self;
cargoSha256 = "sha256-otKLhr58HYMjVXAof6AdObNpggPnvK6qOl7I+4LWIP8=";
nativeBuildInputs = [ pkgs.python3 pkgs.llvm_12 ];
buildInputs = [ pkgs.python3 pkgs.libffi pkgs.libxml2 pkgs.llvm_12 ];
inherit cargoSha256;
nativeBuildInputs = [ pkgs.python3 llvm-nac3 ];
buildInputs = [ pkgs.python3 llvm-nac3 ];
cargoBuildFlags = [ "--package" "nac3artiq" ];
cargoTestFlags = [ "--package" "nac3ast" "--package" "nac3parser" "--package" "nac3core" "--package" "nac3artiq" ];
installPhase =

@@ -36,25 +69,65 @@
);
};

packages.x86_64-w64-mingw32 = rec {
llvm-nac3 = pkgs-mingw.callPackage "${self}/llvm" { inherit (pkgs) llvmPackages_12; };
nac3artiq = pkgs-mingw.python3Packages.toPythonModule (
pkgs-mingw.rustPlatform.buildRustPackage {
name = "nac3artiq";
src = self;
inherit cargoSha256;
nativeBuildInputs = [ pkgs.zip ];
buildInputs = [ pkgs-mingw.zlib ];
configurePhase =
''
export PYO3_CONFIG_FILE=${pyo3-mingw-config}

mkdir llvm-cfg
cat << EOF > llvm-cfg/llvm-config
#!${pkgs.bash}/bin/bash
set -e
# Gross hack to work around llvm-config asking for the wrong system libraries.
exec ${llvm-nac3.dev}/bin/llvm-config-native \$@ | ${pkgs.gnused}/bin/sed s/-lrt\ -ldl\ -lpthread\ -lm//
EOF
chmod +x llvm-cfg/llvm-config
export PATH=$PATH:`pwd`/llvm-cfg

export CARGO_TARGET_X86_64_PC_WINDOWS_GNU_RUSTFLAGS="-C link-arg=-lz -C link-arg=-luuid -C link-arg=-lole32 -C link-arg=-lmcfgthread"
'';
cargoBuildFlags = [ "--package" "nac3artiq" ];
doCheck = false;
installPhase =
''
mkdir -p $out $out/nix-support
ln -s target/x86_64-pc-windows-gnu/release/nac3artiq.dll nac3artiq.pyd
zip $out/nac3artiq.zip nac3artiq.pyd
echo file binary-dist $out/nac3artiq.zip >> $out/nix-support/hydra-build-products
'';
dontFixup = true;
meta.platforms = ["x86_64-windows"];
}
);
};

devShell.x86_64-linux = pkgs.mkShell {
name = "nac3-dev-shell";
buildInputs = with pkgs; [
llvm_12
packages.x86_64-linux.llvm-nac3
clang_12
lld_12
cargo
cargo-insta
rustc
libffi
libxml2
clippy
(python3.withPackages(ps: [ ps.numpy ]))
];
};

hydraJobs = {
inherit (packages.x86_64-linux) nac3artiq;
} // (pkgs.lib.foldr (a: b: {"${pkgs.lib.strings.getName a}" = a;} // b) {} devShell.x86_64-linux.buildInputs);
inherit (packages.x86_64-linux) llvm-nac3 nac3artiq;
llvm-nac3-mingw = packages.x86_64-w64-mingw32.llvm-nac3;
nac3artiq-mingw = packages.x86_64-w64-mingw32.nac3artiq;
};
};

nixConfig = {
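For reference, the ``hydraJobs`` attribute in the old flake gives every dev-shell build input its own Hydra job by folding the input list into an attribute set keyed by package name. A rough Rust analogue of that list-to-named-map fold, with made-up package names standing in for the derivations:

```rust
use std::collections::BTreeMap;

fn main() {
    // Stand-ins for devShell.x86_64-linux.buildInputs; only the names matter here.
    let build_inputs = ["llvm-nac3", "clang", "lld", "cargo", "cargo-insta", "rustc", "clippy"];

    // Mirror `lib.foldr (a: b: { "${getName a}" = a; } // b) {} buildInputs`:
    // walk the list from the right and accumulate a map keyed by package name.
    let jobs: BTreeMap<&str, &str> = build_inputs
        .iter()
        .rev()
        .fold(BTreeMap::new(), |mut acc, name| {
            acc.insert(*name, *name);
            acc
        });

    for (job, input) in &jobs {
        println!("hydra job {} -> {}", job, input);
    }
}
```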
@ -1,61 +0,0 @@
|
|||
commit 6e2dea56207b4e52ade9d1eee6a4f198336dd0a6
|
||||
Author: Sebastien Bourdeauducq <sb@m-labs.hk>
|
||||
Date: Thu Nov 11 23:32:13 2021 +0800
|
||||
|
||||
llvm: switch RISC-V ABI when FPU is present
|
||||
|
||||
diff --git a/pkgs/development/compilers/llvm/12/llvm/default.nix b/pkgs/development/compilers/llvm/12/llvm/default.nix
|
||||
index 30a1a7a16df..41b7211b2a5 100644
|
||||
--- a/pkgs/development/compilers/llvm/12/llvm/default.nix
|
||||
+++ b/pkgs/development/compilers/llvm/12/llvm/default.nix
|
||||
@@ -66,6 +66,7 @@ in stdenv.mkDerivation (rec {
|
||||
sha256 = "sha256:12s8vr6ibri8b48h2z38f3afhwam10arfiqfy4yg37bmc054p5hi";
|
||||
stripLen = 1;
|
||||
})
|
||||
+ ./llvm-future-riscv-abi.diff
|
||||
] ++ lib.optional enablePolly ./gnu-install-dirs-polly.patch;
|
||||
|
||||
postPatch = optionalString stdenv.isDarwin ''
|
||||
@@ -183,7 +184,7 @@ in stdenv.mkDerivation (rec {
|
||||
cp NATIVE/bin/llvm-config $dev/bin/llvm-config-native
|
||||
'';
|
||||
|
||||
- doCheck = stdenv.isLinux && (!stdenv.isx86_32) && (!stdenv.hostPlatform.isMusl);
|
||||
+ doCheck = false; # the ABI change breaks RISC-V FP tests
|
||||
|
||||
checkTarget = "check-all";
|
||||
|
||||
diff --git a/pkgs/development/compilers/llvm/12/llvm/llvm-future-riscv-abi.diff b/pkgs/development/compilers/llvm/12/llvm/llvm-future-riscv-abi.diff
|
||||
new file mode 100644
|
||||
index 00000000000..2427ed0e02c
|
||||
--- /dev/null
|
||||
+++ b/pkgs/development/compilers/llvm/12/llvm/llvm-future-riscv-abi.diff
|
||||
@@ -0,0 +1,28 @@
|
||||
+diff --git a/lib/Target/RISCV/MCTargetDesc/RISCVBaseInfo.cpp b/lib/Target/RISCV/MCTargetDesc/RISCVBaseInfo.cpp
|
||||
+index 0aba18b20..9bb75e7f4 100644
|
||||
+--- a/lib/Target/RISCV/MCTargetDesc/RISCVBaseInfo.cpp
|
||||
++++ b/lib/Target/RISCV/MCTargetDesc/RISCVBaseInfo.cpp
|
||||
+@@ -33,6 +33,8 @@ ABI computeTargetABI(const Triple &TT, FeatureBitset FeatureBits,
|
||||
+ auto TargetABI = getTargetABI(ABIName);
|
||||
+ bool IsRV64 = TT.isArch64Bit();
|
||||
+ bool IsRV32E = FeatureBits[RISCV::FeatureRV32E];
|
||||
++ bool IsRV32D = FeatureBits[RISCV::FeatureStdExtD];
|
||||
++ bool IsRV32F = FeatureBits[RISCV::FeatureStdExtF];
|
||||
+
|
||||
+ if (!ABIName.empty() && TargetABI == ABI_Unknown) {
|
||||
+ errs()
|
||||
+@@ -56,10 +58,10 @@ ABI computeTargetABI(const Triple &TT, FeatureBitset FeatureBits,
|
||||
+ if (TargetABI != ABI_Unknown)
|
||||
+ return TargetABI;
|
||||
+
|
||||
+- // For now, default to the ilp32/ilp32e/lp64 ABI if no explicit ABI is given
|
||||
+- // or an invalid/unrecognised string is given. In the future, it might be
|
||||
+- // worth changing this to default to ilp32f/lp64f and ilp32d/lp64d when
|
||||
+- // hardware support for floating point is present.
|
||||
++ if (IsRV32D)
|
||||
++ return ABI_ILP32D;
|
||||
++ if (IsRV32F)
|
||||
++ return ABI_ILP32F;
|
||||
+ if (IsRV32E)
|
||||
+ return ABI_ILP32E;
|
||||
+ if (IsRV64)
|
|
@ -1,12 +0,0 @@
|
|||
diff --git a/pkgs/development/compilers/llvm/12/llvm/default.nix b/pkgs/development/compilers/llvm/12/llvm/default.nix
|
||||
index 41b7211b2a5..dfc707f034d 100644
|
||||
--- a/pkgs/development/compilers/llvm/12/llvm/default.nix
|
||||
+++ b/pkgs/development/compilers/llvm/12/llvm/default.nix
|
||||
@@ -127,6 +127,7 @@ in stdenv.mkDerivation (rec {
|
||||
"-DLLVM_HOST_TRIPLE=${stdenv.hostPlatform.config}"
|
||||
"-DLLVM_DEFAULT_TARGET_TRIPLE=${stdenv.hostPlatform.config}"
|
||||
"-DLLVM_ENABLE_DUMP=ON"
|
||||
+ "-DLLVM_TARGETS_TO_BUILD=X86;ARM;RISCV"
|
||||
] ++ optionals enableSharedLibraries [
|
||||
"-DLLVM_LINK_LLVM_DYLIB=ON"
|
||||
] ++ optionals enableManpages [
|
|
@ -0,0 +1,230 @@
|
|||
{ lib, stdenv
|
||||
, pkgsBuildBuild
|
||||
, fetchurl
|
||||
, fetchpatch
|
||||
, cmake
|
||||
, python3
|
||||
, libbfd
|
||||
, ncurses
|
||||
, zlib
|
||||
, llvmPackages_12
|
||||
, debugVersion ? false
|
||||
, enableManpages ? false
|
||||
, enableSharedLibraries ? false
|
||||
, enablePolly ? false
|
||||
}:
|
||||
|
||||
let
|
||||
inherit (lib) optional optionals optionalString;
|
||||
|
||||
release_version = "12.0.1";
|
||||
candidate = ""; # empty or "rcN"
|
||||
dash-candidate = lib.optionalString (candidate != "") "-${candidate}";
|
||||
version = "${release_version}${dash-candidate}"; # differentiating these (variables) is important for RCs
|
||||
fetch = name: sha256: fetchurl {
|
||||
url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${version}/${name}-${release_version}${candidate}.src.tar.xz";
|
||||
inherit sha256;
|
||||
};
|
||||
|
||||
# Used when creating a version-suffixed symlink of libLLVM.dylib
|
||||
shortVersion = with lib;
|
||||
concatStringsSep "." (take 1 (splitString "." release_version));
|
||||
|
||||
in stdenv.mkDerivation (rec {
|
||||
pname = "llvm";
|
||||
inherit version;
|
||||
|
||||
src = fetch pname "1pzx9zrmd7r3481sbhwvkms68fwhffpp4mmz45dgrkjpyl2q96kx";
|
||||
polly_src = fetch "polly" "1yfm9ixda4a2sx7ak5vswijx4ydk5lv1c1xh39xmd2kh299y4m12";
|
||||
|
||||
unpackPhase = ''
|
||||
unpackFile $src
|
||||
mv llvm-${release_version}* llvm
|
||||
sourceRoot=$PWD/llvm
|
||||
'' + optionalString enablePolly ''
|
||||
unpackFile $polly_src
|
||||
mv polly-* $sourceRoot/tools/polly
|
||||
'';
|
||||
|
||||
outputs = [ "out" "lib" "dev" "python" ];
|
||||
|
||||
nativeBuildInputs = [ cmake python3 ]
|
||||
++ optionals enableManpages [ python3.pkgs.sphinx python3.pkgs.recommonmark ];
|
||||
|
||||
buildInputs = [ ];
|
||||
|
||||
propagatedBuildInputs = optionals (stdenv.buildPlatform == stdenv.hostPlatform) [ ncurses ]
|
||||
++ [ zlib ];
|
||||
|
||||
patches = [
|
||||
./gnu-install-dirs.patch
|
||||
# On older CPUs (e.g. Hydra/wendy) we'd be getting an error in this test.
|
||||
(fetchpatch {
|
||||
name = "uops-CMOV16rm-noreg.diff";
|
||||
url = "https://github.com/llvm/llvm-project/commit/9e9f991ac033.diff";
|
||||
sha256 = "sha256:12s8vr6ibri8b48h2z38f3afhwam10arfiqfy4yg37bmc054p5hi";
|
||||
stripLen = 1;
|
||||
})
|
||||
./llvm-future-riscv-abi.diff
|
||||
] ++ lib.optional enablePolly ./gnu-install-dirs-polly.patch;
|
||||
|
||||
postPatch = optionalString stdenv.isDarwin ''
|
||||
substituteInPlace cmake/modules/AddLLVM.cmake \
|
||||
--replace 'set(_install_name_dir INSTALL_NAME_DIR "@rpath")' "set(_install_name_dir)" \
|
||||
--replace 'set(_install_rpath "@loader_path/../''${CMAKE_INSTALL_LIBDIR}''${LLVM_LIBDIR_SUFFIX}" ''${extra_libdir})' ""
|
||||
''
|
||||
# Patch llvm-config to return correct library path based on --link-{shared,static}.
|
||||
+ ''
|
||||
substitute '${./outputs.patch}' ./outputs.patch --subst-var lib
|
||||
patch -p1 < ./outputs.patch
|
||||
'' + ''
|
||||
# FileSystem permissions tests fail with various special bits
|
||||
substituteInPlace unittests/Support/CMakeLists.txt \
|
||||
--replace "Path.cpp" ""
|
||||
rm unittests/Support/Path.cpp
|
||||
substituteInPlace unittests/IR/CMakeLists.txt \
|
||||
--replace "PassBuilderCallbacksTest.cpp" ""
|
||||
rm unittests/IR/PassBuilderCallbacksTest.cpp
|
||||
# TODO: Fix failing tests:
|
||||
rm test/DebugInfo/X86/vla-multi.ll
|
||||
'' + optionalString stdenv.hostPlatform.isMusl ''
|
||||
patch -p1 -i ${../../TLI-musl.patch}
|
||||
substituteInPlace unittests/Support/CMakeLists.txt \
|
||||
--replace "add_subdirectory(DynamicLibrary)" ""
|
||||
rm unittests/Support/DynamicLibrary/DynamicLibraryTest.cpp
|
||||
# valgrind unhappy with musl or glibc, but fails w/musl only
|
||||
rm test/CodeGen/AArch64/wineh4.mir
|
||||
'' + optionalString stdenv.hostPlatform.isAarch32 ''
|
||||
# skip failing X86 test cases on 32-bit ARM
|
||||
rm test/DebugInfo/X86/convert-debugloc.ll
|
||||
rm test/DebugInfo/X86/convert-inlined.ll
|
||||
rm test/DebugInfo/X86/convert-linked.ll
|
||||
rm test/tools/dsymutil/X86/op-convert.test
|
||||
'' + optionalString (stdenv.hostPlatform.system == "armv6l-linux") ''
|
||||
# Seems to require certain floating point hardware (NEON?)
|
||||
rm test/ExecutionEngine/frem.ll
|
||||
'' + ''
|
||||
patchShebangs test/BugPoint/compile-custom.ll.py
|
||||
'';
|
||||
|
||||
# hacky fix: created binaries need to be run before installation
|
||||
preBuild = ''
|
||||
mkdir -p $out/
|
||||
ln -sv $PWD/lib $out
|
||||
'';
|
||||
|
||||
# E.g. mesa.drivers use the build-id as a cache key (see #93946):
|
||||
LDFLAGS = optionalString (enableSharedLibraries && !stdenv.isDarwin) "-Wl,--build-id=sha1";
|
||||
|
||||
cmakeFlags = with stdenv; [
|
||||
"-DLLVM_INSTALL_CMAKE_DIR=${placeholder "dev"}/lib/cmake/llvm/"
|
||||
"-DCMAKE_BUILD_TYPE=${if debugVersion then "Debug" else "Release"}"
|
||||
"-DLLVM_BUILD_TESTS=${if stdenv.targetPlatform.isMinGW then "OFF" else "ON"}"
|
||||
"-DLLVM_HOST_TRIPLE=${stdenv.hostPlatform.config}"
|
||||
"-DLLVM_DEFAULT_TARGET_TRIPLE=${stdenv.hostPlatform.config}"
|
||||
"-DLLVM_ENABLE_UNWIND_TABLES=OFF"
|
||||
"-DLLVM_TARGETS_TO_BUILD=X86;ARM;RISCV"
|
||||
] ++ optionals enableSharedLibraries [
|
||||
"-DLLVM_LINK_LLVM_DYLIB=ON"
|
||||
] ++ optionals enableManpages [
|
||||
"-DLLVM_BUILD_DOCS=ON"
|
||||
"-DLLVM_ENABLE_SPHINX=ON"
|
||||
"-DSPHINX_OUTPUT_MAN=ON"
|
||||
"-DSPHINX_OUTPUT_HTML=OFF"
|
||||
"-DSPHINX_WARNINGS_AS_ERRORS=OFF"
|
||||
] ++ optionals (!isDarwin && !stdenv.targetPlatform.isMinGW) [
|
||||
"-DLLVM_BINUTILS_INCDIR=${libbfd.dev}/include"
|
||||
] ++ optionals isDarwin [
|
||||
"-DLLVM_ENABLE_LIBCXX=ON"
|
||||
"-DCAN_TARGET_i386=false"
|
||||
] ++ optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
|
||||
"-DCMAKE_CROSSCOMPILING=True"
|
||||
"-DLLVM_TABLEGEN=${llvmPackages_12.tools.llvm}/bin/llvm-tblgen"
|
||||
(
|
||||
let
|
||||
nativeCC = pkgsBuildBuild.targetPackages.stdenv.cc;
|
||||
nativeBintools = nativeCC.bintools.bintools;
|
||||
nativeToolchainFlags = [
|
||||
"-DCMAKE_C_COMPILER=${nativeCC}/bin/${nativeCC.targetPrefix}cc"
|
||||
"-DCMAKE_CXX_COMPILER=${nativeCC}/bin/${nativeCC.targetPrefix}c++"
|
||||
"-DCMAKE_AR=${nativeBintools}/bin/${nativeBintools.targetPrefix}ar"
|
||||
"-DCMAKE_STRIP=${nativeBintools}/bin/${nativeBintools.targetPrefix}strip"
|
||||
"-DCMAKE_RANLIB=${nativeBintools}/bin/${nativeBintools.targetPrefix}ranlib"
|
||||
];
|
||||
in "-DCROSS_TOOLCHAIN_FLAGS_NATIVE:list=${lib.concatStringsSep ";" nativeToolchainFlags}"
|
||||
)
|
||||
];
|
||||
|
||||
postBuild = ''
|
||||
rm -fR $out
|
||||
'';
|
||||
|
||||
preCheck = ''
|
||||
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH''${LD_LIBRARY_PATH:+:}$PWD/lib
|
||||
'';
|
||||
|
||||
postInstall = ''
|
||||
mkdir -p $python/share
|
||||
mv $out/share/opt-viewer $python/share/opt-viewer
|
||||
moveToOutput "bin/llvm-config*" "$dev"
|
||||
substituteInPlace "$dev/lib/cmake/llvm/LLVMExports-${if debugVersion then "debug" else "release"}.cmake" \
|
||||
--replace "\''${_IMPORT_PREFIX}/lib/lib" "$lib/lib/lib" \
|
||||
--replace "$out/bin/llvm-config" "$dev/bin/llvm-config"
|
||||
substituteInPlace "$dev/lib/cmake/llvm/LLVMConfig.cmake" \
|
||||
--replace 'set(LLVM_BINARY_DIR "''${LLVM_INSTALL_PREFIX}")' 'set(LLVM_BINARY_DIR "''${LLVM_INSTALL_PREFIX}'"$lib"'")'
|
||||
''
|
||||
+ optionalString (stdenv.isDarwin && enableSharedLibraries) ''
|
||||
ln -s $lib/lib/libLLVM.dylib $lib/lib/libLLVM-${shortVersion}.dylib
|
||||
ln -s $lib/lib/libLLVM.dylib $lib/lib/libLLVM-${release_version}.dylib
|
||||
''
|
||||
+ optionalString (stdenv.buildPlatform != stdenv.hostPlatform) ''
|
||||
cp NATIVE/bin/llvm-config $dev/bin/llvm-config-native
|
||||
'';
|
||||
|
||||
doCheck = false; # the ABI change breaks RISC-V FP tests
|
||||
|
||||
checkTarget = "check-all";
|
||||
|
||||
requiredSystemFeatures = [ "big-parallel" ];
|
||||
meta = {
|
||||
homepage = "https://llvm.org/";
|
||||
description = "A collection of modular and reusable compiler and toolchain technologies";
|
||||
longDescription = ''
|
||||
The LLVM Project is a collection of modular and reusable compiler and
|
||||
toolchain technologies. Despite its name, LLVM has little to do with
|
||||
traditional virtual machines. The name "LLVM" itself is not an acronym; it
|
||||
is the full name of the project.
|
||||
LLVM began as a research project at the University of Illinois, with the
|
||||
goal of providing a modern, SSA-based compilation strategy capable of
|
||||
supporting both static and dynamic compilation of arbitrary programming
|
||||
languages. Since then, LLVM has grown to be an umbrella project consisting
|
||||
of a number of subprojects, many of which are being used in production by
|
||||
a wide variety of commercial and open source projects as well as being
|
||||
widely used in academic research. Code in the LLVM project is licensed
|
||||
under the "Apache 2.0 License with LLVM exceptions".
|
||||
'';
|
||||
};
|
||||
} // lib.optionalAttrs enableManpages {
|
||||
pname = "llvm-manpages";
|
||||
|
||||
buildPhase = ''
|
||||
make docs-llvm-man
|
||||
'';
|
||||
|
||||
propagatedBuildInputs = [];
|
||||
|
||||
installPhase = ''
|
||||
make -C docs install
|
||||
'';
|
||||
|
||||
postPatch = null;
|
||||
postInstall = null;
|
||||
|
||||
outputs = [ "out" ];
|
||||
|
||||
doCheck = false;
|
||||
|
||||
meta = {
|
||||
description = "man pages for LLVM ${version}";
|
||||
};
|
||||
})
|
|
@ -0,0 +1,105 @@
|
|||
diff --git a/tools/polly/CMakeLists.txt b/tools/polly/CMakeLists.txt
|
||||
index ca7c04c565bb..6ed5db5dd4f8 100644
|
||||
--- a/tools/polly/CMakeLists.txt
|
||||
+++ b/tools/polly/CMakeLists.txt
|
||||
@@ -2,7 +2,11 @@
|
||||
if (NOT DEFINED LLVM_MAIN_SRC_DIR)
|
||||
project(Polly)
|
||||
cmake_minimum_required(VERSION 3.13.4)
|
||||
+endif()
|
||||
+
|
||||
+include(GNUInstallDirs)
|
||||
|
||||
+if (NOT DEFINED LLVM_MAIN_SRC_DIR)
|
||||
# Where is LLVM installed?
|
||||
find_package(LLVM CONFIG REQUIRED)
|
||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${LLVM_CMAKE_DIR})
|
||||
@@ -122,13 +126,13 @@ include_directories(
|
||||
|
||||
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
|
||||
install(DIRECTORY include/
|
||||
- DESTINATION include
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
|
||||
FILES_MATCHING
|
||||
PATTERN "*.h"
|
||||
)
|
||||
|
||||
install(DIRECTORY ${POLLY_BINARY_DIR}/include/
|
||||
- DESTINATION include
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
|
||||
FILES_MATCHING
|
||||
PATTERN "*.h"
|
||||
PATTERN "CMakeFiles" EXCLUDE
|
||||
diff --git a/tools/polly/cmake/CMakeLists.txt b/tools/polly/cmake/CMakeLists.txt
|
||||
index 7cc129ba2e90..137be25e4b80 100644
|
||||
--- a/tools/polly/cmake/CMakeLists.txt
|
||||
+++ b/tools/polly/cmake/CMakeLists.txt
|
||||
@@ -79,18 +79,18 @@ file(GENERATE
|
||||
|
||||
# Generate PollyConfig.cmake for the install tree.
|
||||
unset(POLLY_EXPORTS)
|
||||
-set(POLLY_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
|
||||
+set(POLLY_INSTALL_PREFIX "")
|
||||
set(POLLY_CONFIG_LLVM_CMAKE_DIR "${LLVM_BINARY_DIR}/${LLVM_INSTALL_PACKAGE_DIR}")
|
||||
-set(POLLY_CONFIG_CMAKE_DIR "${POLLY_INSTALL_PREFIX}/${POLLY_INSTALL_PACKAGE_DIR}")
|
||||
-set(POLLY_CONFIG_LIBRARY_DIRS "${POLLY_INSTALL_PREFIX}/lib${LLVM_LIBDIR_SUFFIX}")
|
||||
+set(POLLY_CONFIG_CMAKE_DIR "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_PREFIX}/${POLLY_INSTALL_PACKAGE_DIR}")
|
||||
+set(POLLY_CONFIG_LIBRARY_DIRS "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_LIBDIR}${LLVM_LIBDIR_SUFFIX}")
|
||||
if (POLLY_BUNDLED_ISL)
|
||||
set(POLLY_CONFIG_INCLUDE_DIRS
|
||||
- "${POLLY_INSTALL_PREFIX}/include"
|
||||
- "${POLLY_INSTALL_PREFIX}/include/polly"
|
||||
+ "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_LIBDIR}"
|
||||
+ "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_LIBDIR}/polly"
|
||||
)
|
||||
else()
|
||||
set(POLLY_CONFIG_INCLUDE_DIRS
|
||||
- "${POLLY_INSTALL_PREFIX}/include"
|
||||
+ "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_INCLUDEDIR}"
|
||||
${ISL_INCLUDE_DIRS}
|
||||
)
|
||||
endif()
|
||||
@@ -100,12 +100,12 @@ endif()
|
||||
foreach(tgt IN LISTS POLLY_CONFIG_EXPORTED_TARGETS)
|
||||
get_target_property(tgt_type ${tgt} TYPE)
|
||||
if (tgt_type STREQUAL "EXECUTABLE")
|
||||
- set(tgt_prefix "bin/")
|
||||
+ set(tgt_prefix "${CMAKE_INSTALL_BINDIR}/")
|
||||
else()
|
||||
- set(tgt_prefix "lib/")
|
||||
+ set(tgt_prefix "${CMAKE_INSTALL_LIBDIR}/")
|
||||
endif()
|
||||
|
||||
- set(tgt_path "${CMAKE_INSTALL_PREFIX}/${tgt_prefix}$<TARGET_FILE_NAME:${tgt}>")
|
||||
+ set(tgt_path "${tgt_prefix}$<TARGET_FILE_NAME:${tgt}>")
|
||||
file(RELATIVE_PATH tgt_path ${POLLY_CONFIG_CMAKE_DIR} ${tgt_path})
|
||||
|
||||
if (NOT tgt_type STREQUAL "INTERFACE_LIBRARY")
|
||||
diff --git a/tools/polly/cmake/polly_macros.cmake b/tools/polly/cmake/polly_macros.cmake
|
||||
index 518a09b45a42..bd9d6f5542ad 100644
|
||||
--- a/tools/polly/cmake/polly_macros.cmake
|
||||
+++ b/tools/polly/cmake/polly_macros.cmake
|
||||
@@ -44,8 +44,8 @@ macro(add_polly_library name)
|
||||
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY OR ${name} STREQUAL "LLVMPolly")
|
||||
install(TARGETS ${name}
|
||||
EXPORT LLVMExports
|
||||
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX}
|
||||
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX})
|
||||
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}
|
||||
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
|
||||
endif()
|
||||
set_property(GLOBAL APPEND PROPERTY LLVM_EXPORTS ${name})
|
||||
endmacro(add_polly_library)
|
||||
diff --git a/tools/polly/lib/External/CMakeLists.txt b/tools/polly/lib/External/CMakeLists.txt
|
||||
index 8991094d92c7..178d8ad606bb 100644
|
||||
--- a/tools/polly/lib/External/CMakeLists.txt
|
||||
+++ b/tools/polly/lib/External/CMakeLists.txt
|
||||
@@ -275,7 +275,7 @@ if (POLLY_BUNDLED_ISL)
|
||||
install(DIRECTORY
|
||||
${ISL_SOURCE_DIR}/include/
|
||||
${ISL_BINARY_DIR}/include/
|
||||
- DESTINATION include/polly
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/polly
|
||||
FILES_MATCHING
|
||||
PATTERN "*.h"
|
||||
PATTERN "CMakeFiles" EXCLUDE
|
|
@ -0,0 +1,417 @@
|
|||
diff --git a/CMakeLists.txt b/CMakeLists.txt
|
||||
index 277d0fe54d7b..af69c8be8745 100644
|
||||
--- a/CMakeLists.txt
|
||||
+++ b/CMakeLists.txt
|
||||
@@ -256,15 +256,21 @@ if (CMAKE_BUILD_TYPE AND
|
||||
message(FATAL_ERROR "Invalid value for CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
|
||||
endif()
|
||||
|
||||
+include(GNUInstallDirs)
|
||||
+
|
||||
set(LLVM_LIBDIR_SUFFIX "" CACHE STRING "Define suffix of library directory name (32/64)" )
|
||||
|
||||
-set(LLVM_TOOLS_INSTALL_DIR "bin" CACHE STRING "Path for binary subdirectory (defaults to 'bin')")
|
||||
+set(LLVM_TOOLS_INSTALL_DIR "${CMAKE_INSTALL_BINDIR}" CACHE STRING
|
||||
+ "Path for binary subdirectory (defaults to 'bin')")
|
||||
mark_as_advanced(LLVM_TOOLS_INSTALL_DIR)
|
||||
|
||||
set(LLVM_UTILS_INSTALL_DIR "${LLVM_TOOLS_INSTALL_DIR}" CACHE STRING
|
||||
"Path to install LLVM utilities (enabled by LLVM_INSTALL_UTILS=ON) (defaults to LLVM_TOOLS_INSTALL_DIR)")
|
||||
mark_as_advanced(LLVM_UTILS_INSTALL_DIR)
|
||||
|
||||
+set(LLVM_INSTALL_CMAKE_DIR "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}/cmake/llvm" CACHE STRING
|
||||
+ "Path for CMake subdirectory (defaults to lib/cmake/llvm)" )
|
||||
+
|
||||
# They are used as destination of target generators.
|
||||
set(LLVM_RUNTIME_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/bin)
|
||||
set(LLVM_LIBRARY_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/lib${LLVM_LIBDIR_SUFFIX})
|
||||
@@ -567,9 +573,9 @@ option (LLVM_ENABLE_SPHINX "Use Sphinx to generate llvm documentation." OFF)
|
||||
option (LLVM_ENABLE_OCAMLDOC "Build OCaml bindings documentation." ON)
|
||||
option (LLVM_ENABLE_BINDINGS "Build bindings." ON)
|
||||
|
||||
-set(LLVM_INSTALL_DOXYGEN_HTML_DIR "share/doc/llvm/doxygen-html"
|
||||
+set(LLVM_INSTALL_DOXYGEN_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/doxygen-html"
|
||||
CACHE STRING "Doxygen-generated HTML documentation install directory")
|
||||
-set(LLVM_INSTALL_OCAMLDOC_HTML_DIR "share/doc/llvm/ocaml-html"
|
||||
+set(LLVM_INSTALL_OCAMLDOC_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/ocaml-html"
|
||||
CACHE STRING "OCamldoc-generated HTML documentation install directory")
|
||||
|
||||
option (LLVM_BUILD_EXTERNAL_COMPILER_RT
|
||||
@@ -1027,7 +1033,7 @@ endif()
|
||||
|
||||
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
|
||||
install(DIRECTORY include/llvm include/llvm-c
|
||||
- DESTINATION include
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
|
||||
COMPONENT llvm-headers
|
||||
FILES_MATCHING
|
||||
PATTERN "*.def"
|
||||
@@ -1038,7 +1044,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
|
||||
)
|
||||
|
||||
install(DIRECTORY ${LLVM_INCLUDE_DIR}/llvm ${LLVM_INCLUDE_DIR}/llvm-c
|
||||
- DESTINATION include
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
|
||||
COMPONENT llvm-headers
|
||||
FILES_MATCHING
|
||||
PATTERN "*.def"
|
||||
@@ -1052,13 +1058,13 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
|
||||
|
||||
if (LLVM_INSTALL_MODULEMAPS)
|
||||
install(DIRECTORY include/llvm include/llvm-c
|
||||
- DESTINATION include
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
|
||||
COMPONENT llvm-headers
|
||||
FILES_MATCHING
|
||||
PATTERN "module.modulemap"
|
||||
)
|
||||
install(FILES include/llvm/module.install.modulemap
|
||||
- DESTINATION include/llvm
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm
|
||||
COMPONENT llvm-headers
|
||||
RENAME "module.extern.modulemap"
|
||||
)
|
||||
diff --git a/cmake/modules/AddLLVM.cmake b/cmake/modules/AddLLVM.cmake
|
||||
index 97c9980c7de3..409e8b615f75 100644
|
||||
--- a/cmake/modules/AddLLVM.cmake
|
||||
+++ b/cmake/modules/AddLLVM.cmake
|
||||
@@ -804,9 +804,9 @@ macro(add_llvm_library name)
|
||||
|
||||
install(TARGETS ${name}
|
||||
${export_to_llvmexports}
|
||||
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
|
||||
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
|
||||
- RUNTIME DESTINATION bin COMPONENT ${name})
|
||||
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
|
||||
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX} COMPONENT ${name}
|
||||
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT ${name})
|
||||
|
||||
if (NOT LLVM_ENABLE_IDE)
|
||||
add_llvm_install_targets(install-${name}
|
||||
@@ -1022,7 +1022,7 @@ function(process_llvm_pass_plugins)
|
||||
"set(LLVM_STATIC_EXTENSIONS ${LLVM_STATIC_EXTENSIONS})")
|
||||
install(FILES
|
||||
${llvm_cmake_builddir}/LLVMConfigExtensions.cmake
|
||||
- DESTINATION ${LLVM_INSTALL_PACKAGE_DIR}
|
||||
+ DESTINATION ${LLVM_INSTALL_CMAKE_DIR}
|
||||
COMPONENT cmake-exports)
|
||||
|
||||
set(ExtensionDef "${LLVM_BINARY_DIR}/include/llvm/Support/Extension.def")
|
||||
@@ -1242,7 +1242,7 @@ macro(add_llvm_example name)
|
||||
endif()
|
||||
add_llvm_executable(${name} ${ARGN})
|
||||
if( LLVM_BUILD_EXAMPLES )
|
||||
- install(TARGETS ${name} RUNTIME DESTINATION examples)
|
||||
+ install(TARGETS ${name} RUNTIME DESTINATION ${CMAKE_INSTALL_DOCDIR}/examples)
|
||||
endif()
|
||||
set_target_properties(${name} PROPERTIES FOLDER "Examples")
|
||||
endmacro(add_llvm_example name)
|
||||
@@ -1854,7 +1854,7 @@ function(llvm_install_library_symlink name dest type)
|
||||
set(full_name ${CMAKE_${type}_LIBRARY_PREFIX}${name}${CMAKE_${type}_LIBRARY_SUFFIX})
|
||||
set(full_dest ${CMAKE_${type}_LIBRARY_PREFIX}${dest}${CMAKE_${type}_LIBRARY_SUFFIX})
|
||||
|
||||
- set(output_dir lib${LLVM_LIBDIR_SUFFIX})
|
||||
+ set(output_dir ${CMAKE_INSTALL_FULL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
|
||||
if(WIN32 AND "${type}" STREQUAL "SHARED")
|
||||
set(output_dir bin)
|
||||
endif()
|
||||
@@ -1871,7 +1871,7 @@ function(llvm_install_library_symlink name dest type)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
-function(llvm_install_symlink name dest)
|
||||
+function(llvm_install_symlink name dest output_dir)
|
||||
cmake_parse_arguments(ARG "ALWAYS_GENERATE" "COMPONENT" "" ${ARGN})
|
||||
foreach(path ${CMAKE_MODULE_PATH})
|
||||
if(EXISTS ${path}/LLVMInstallSymlink.cmake)
|
||||
@@ -1894,7 +1894,7 @@ function(llvm_install_symlink name dest)
|
||||
set(full_dest ${dest}${CMAKE_EXECUTABLE_SUFFIX})
|
||||
|
||||
install(SCRIPT ${INSTALL_SYMLINK}
|
||||
- CODE "install_symlink(${full_name} ${full_dest} ${LLVM_TOOLS_INSTALL_DIR})"
|
||||
+ CODE "install_symlink(${full_name} ${full_dest} ${output_dir})"
|
||||
COMPONENT ${component})
|
||||
|
||||
if (NOT LLVM_ENABLE_IDE AND NOT ARG_ALWAYS_GENERATE)
|
||||
@@ -1977,7 +1977,8 @@ function(add_llvm_tool_symlink link_name target)
|
||||
endif()
|
||||
|
||||
if ((TOOL_IS_TOOLCHAIN OR NOT LLVM_INSTALL_TOOLCHAIN_ONLY) AND LLVM_BUILD_TOOLS)
|
||||
- llvm_install_symlink(${link_name} ${target})
|
||||
+ GNUInstallDirs_get_absolute_install_dir(output_dir LLVM_TOOLS_INSTALL_DIR)
|
||||
+ llvm_install_symlink(${link_name} ${target} ${output_dir})
|
||||
endif()
|
||||
endif()
|
||||
endfunction()
|
||||
@@ -2100,9 +2101,9 @@ function(llvm_setup_rpath name)
|
||||
|
||||
if (APPLE)
|
||||
set(_install_name_dir INSTALL_NAME_DIR "@rpath")
|
||||
- set(_install_rpath "@loader_path/../lib${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
|
||||
+ set(_install_rpath "@loader_path/../${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
|
||||
elseif(UNIX)
|
||||
- set(_install_rpath "\$ORIGIN/../lib${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
|
||||
+ set(_install_rpath "\$ORIGIN/../${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
|
||||
if(${CMAKE_SYSTEM_NAME} MATCHES "(FreeBSD|DragonFly)")
|
||||
set_property(TARGET ${name} APPEND_STRING PROPERTY
|
||||
LINK_FLAGS " -Wl,-z,origin ")
|
||||
diff --git a/cmake/modules/AddOCaml.cmake b/cmake/modules/AddOCaml.cmake
|
||||
index 554046b20edf..4d1ad980641e 100644
|
||||
--- a/cmake/modules/AddOCaml.cmake
|
||||
+++ b/cmake/modules/AddOCaml.cmake
|
||||
@@ -144,9 +144,9 @@ function(add_ocaml_library name)
|
||||
endforeach()
|
||||
|
||||
if( APPLE )
|
||||
- set(ocaml_rpath "@executable_path/../../../lib${LLVM_LIBDIR_SUFFIX}")
|
||||
+ set(ocaml_rpath "@executable_path/../../../${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}")
|
||||
elseif( UNIX )
|
||||
- set(ocaml_rpath "\\$ORIGIN/../../../lib${LLVM_LIBDIR_SUFFIX}")
|
||||
+ set(ocaml_rpath "\\$ORIGIN/../../../${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}")
|
||||
endif()
|
||||
list(APPEND ocaml_flags "-ldopt" "-Wl,-rpath,${ocaml_rpath}")
|
||||
|
||||
diff --git a/cmake/modules/AddSphinxTarget.cmake b/cmake/modules/AddSphinxTarget.cmake
|
||||
index e80c3b5c1cac..482f6d715ef5 100644
|
||||
--- a/cmake/modules/AddSphinxTarget.cmake
|
||||
+++ b/cmake/modules/AddSphinxTarget.cmake
|
||||
@@ -90,7 +90,7 @@ function (add_sphinx_target builder project)
|
||||
endif()
|
||||
elseif (builder STREQUAL html)
|
||||
string(TOUPPER "${project}" project_upper)
|
||||
- set(${project_upper}_INSTALL_SPHINX_HTML_DIR "share/doc/${project}/html"
|
||||
+ set(${project_upper}_INSTALL_SPHINX_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/html"
|
||||
CACHE STRING "HTML documentation install directory for ${project}")
|
||||
|
||||
# '/.' indicates: copy the contents of the directory directly into
|
||||
diff --git a/cmake/modules/CMakeLists.txt b/cmake/modules/CMakeLists.txt
|
||||
index 505dc9a29d70..36e6c63af3f4 100644
|
||||
--- a/cmake/modules/CMakeLists.txt
|
||||
+++ b/cmake/modules/CMakeLists.txt
|
||||
@@ -1,4 +1,4 @@
|
||||
-set(LLVM_INSTALL_PACKAGE_DIR lib${LLVM_LIBDIR_SUFFIX}/cmake/llvm)
|
||||
+set(LLVM_INSTALL_PACKAGE_DIR ${LLVM_INSTALL_CMAKE_DIR} CACHE STRING "Path for CMake subdirectory (defaults to 'cmake/llvm')")
|
||||
set(llvm_cmake_builddir "${LLVM_BINARY_DIR}/${LLVM_INSTALL_PACKAGE_DIR}")
|
||||
|
||||
# First for users who use an installed LLVM, create the LLVMExports.cmake file.
|
||||
@@ -107,13 +107,13 @@ foreach(p ${_count})
|
||||
set(LLVM_CONFIG_CODE "${LLVM_CONFIG_CODE}
|
||||
get_filename_component(LLVM_INSTALL_PREFIX \"\${LLVM_INSTALL_PREFIX}\" PATH)")
|
||||
endforeach(p)
|
||||
-set(LLVM_CONFIG_INCLUDE_DIRS "\${LLVM_INSTALL_PREFIX}/include")
|
||||
+set(LLVM_CONFIG_INCLUDE_DIRS "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}")
|
||||
set(LLVM_CONFIG_INCLUDE_DIR "${LLVM_CONFIG_INCLUDE_DIRS}")
|
||||
set(LLVM_CONFIG_MAIN_INCLUDE_DIR "${LLVM_CONFIG_INCLUDE_DIRS}")
|
||||
-set(LLVM_CONFIG_LIBRARY_DIRS "\${LLVM_INSTALL_PREFIX}/lib\${LLVM_LIBDIR_SUFFIX}")
|
||||
+set(LLVM_CONFIG_LIBRARY_DIRS "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}\${LLVM_LIBDIR_SUFFIX}")
|
||||
set(LLVM_CONFIG_CMAKE_DIR "\${LLVM_INSTALL_PREFIX}/${LLVM_INSTALL_PACKAGE_DIR}")
|
||||
set(LLVM_CONFIG_BINARY_DIR "\${LLVM_INSTALL_PREFIX}")
|
||||
-set(LLVM_CONFIG_TOOLS_BINARY_DIR "\${LLVM_INSTALL_PREFIX}/bin")
|
||||
+set(LLVM_CONFIG_TOOLS_BINARY_DIR "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}")
|
||||
|
||||
# Generate a default location for lit
|
||||
if (LLVM_INSTALL_UTILS AND LLVM_BUILD_UTILS)
|
||||
diff --git a/cmake/modules/LLVMInstallSymlink.cmake b/cmake/modules/LLVMInstallSymlink.cmake
|
||||
index 09fed8085c23..aa79f192abf0 100644
|
||||
--- a/cmake/modules/LLVMInstallSymlink.cmake
|
||||
+++ b/cmake/modules/LLVMInstallSymlink.cmake
|
||||
@@ -10,7 +10,7 @@ function(install_symlink name target outdir)
|
||||
set(LINK_OR_COPY copy)
|
||||
endif()
|
||||
|
||||
- set(bindir "${DESTDIR}${CMAKE_INSTALL_PREFIX}/${outdir}/")
|
||||
+ set(bindir "${DESTDIR}${outdir}/")
|
||||
|
||||
message(STATUS "Creating ${name}")
|
||||
|
||||
diff --git a/docs/CMake.rst b/docs/CMake.rst
|
||||
index bb821b417ad9..6a528f7c2ad3 100644
|
||||
--- a/docs/CMake.rst
|
||||
+++ b/docs/CMake.rst
|
||||
@@ -196,7 +196,7 @@ CMake manual, or execute ``cmake --help-variable VARIABLE_NAME``.
|
||||
**LLVM_LIBDIR_SUFFIX**:STRING
|
||||
Extra suffix to append to the directory where libraries are to be
|
||||
installed. On a 64-bit architecture, one could use ``-DLLVM_LIBDIR_SUFFIX=64``
|
||||
- to install libraries to ``/usr/lib64``.
|
||||
+ to install libraries to ``/usr/lib64``. See also ``CMAKE_INSTALL_LIBDIR``.
|
||||
|
||||
**CMAKE_C_FLAGS**:STRING
|
||||
Extra flags to use when compiling C source files.
|
||||
@@ -550,8 +550,8 @@ LLVM-specific variables
|
||||
|
||||
**LLVM_INSTALL_DOXYGEN_HTML_DIR**:STRING
|
||||
The path to install Doxygen-generated HTML documentation to. This path can
|
||||
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
|
||||
- `share/doc/llvm/doxygen-html`.
|
||||
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
|
||||
+ `${CMAKE_INSTALL_DOCDIR}/${project}/doxygen-html`.
|
||||
|
||||
**LLVM_ENABLE_SPHINX**:BOOL
|
||||
If specified, CMake will search for the ``sphinx-build`` executable and will make
|
||||
@@ -582,13 +582,33 @@ LLVM-specific variables
|
||||
|
||||
**LLVM_INSTALL_SPHINX_HTML_DIR**:STRING
|
||||
The path to install Sphinx-generated HTML documentation to. This path can
|
||||
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
|
||||
- `share/doc/llvm/html`.
|
||||
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
|
||||
+ `${CMAKE_INSTALL_DOCDIR}/${project}/html`.
|
||||
|
||||
**LLVM_INSTALL_OCAMLDOC_HTML_DIR**:STRING
|
||||
The path to install OCamldoc-generated HTML documentation to. This path can
|
||||
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
|
||||
- `share/doc/llvm/ocaml-html`.
|
||||
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
|
||||
+ `${CMAKE_INSTALL_DOCDIR}/${project}/ocaml-html`.
|
||||
+
|
||||
+**CMAKE_INSTALL_BINDIR**:STRING
|
||||
+ The path to install binary tools, relative to the ``CMAKE_INSTALL_PREFIX``.
|
||||
+ Defaults to `bin`.
|
||||
+
|
||||
+**CMAKE_INSTALL_LIBDIR**:STRING
|
||||
+ The path to install libraries, relative to the ``CMAKE_INSTALL_PREFIX``.
|
||||
+ Defaults to `lib`.
|
||||
+
|
||||
+**CMAKE_INSTALL_INCLUDEDIR**:STRING
|
||||
+ The path to install header files, relative to the ``CMAKE_INSTALL_PREFIX``.
|
||||
+ Defaults to `include`.
|
||||
+
|
||||
+**CMAKE_INSTALL_DOCDIR**:STRING
|
||||
+ The path to install documentation, relative to the ``CMAKE_INSTALL_PREFIX``.
|
||||
+ Defaults to `share/doc`.
|
||||
+
|
||||
+**CMAKE_INSTALL_MANDIR**:STRING
|
||||
+ The path to install manpage files, relative to the ``CMAKE_INSTALL_PREFIX``.
|
||||
+ Defaults to `share/man`.
|
||||
|
||||
**LLVM_CREATE_XCODE_TOOLCHAIN**:BOOL
|
||||
macOS Only: If enabled CMake will generate a target named
|
||||
@@ -786,9 +806,11 @@ the ``cmake`` command or by setting it directly in ``ccmake`` or ``cmake-gui``).
|
||||
|
||||
This file is available in two different locations.
|
||||
|
||||
-* ``<INSTALL_PREFIX>/lib/cmake/llvm/LLVMConfig.cmake`` where
|
||||
- ``<INSTALL_PREFIX>`` is the install prefix of an installed version of LLVM.
|
||||
- On Linux typically this is ``/usr/lib/cmake/llvm/LLVMConfig.cmake``.
|
||||
+* ``<LLVM_INSTALL_PACKAGE_DIR>LLVMConfig.cmake`` where
|
||||
+ ``<LLVM_INSTALL_PACKAGE_DIR>`` is the location where LLVM CMake modules are
|
||||
+ installed as part of an installed version of LLVM. This is typically
|
||||
+ ``cmake/llvm/`` within the lib directory. On Linux, this is typically
|
||||
+ ``/usr/lib/cmake/llvm/LLVMConfig.cmake``.
|
||||
|
||||
* ``<LLVM_BUILD_ROOT>/lib/cmake/llvm/LLVMConfig.cmake`` where
|
||||
``<LLVM_BUILD_ROOT>`` is the root of the LLVM build tree. **Note: this is only
|
||||
diff --git a/examples/Bye/CMakeLists.txt b/examples/Bye/CMakeLists.txt
|
||||
index bb96edb4b4bf..678c22fb43c8 100644
|
||||
--- a/examples/Bye/CMakeLists.txt
|
||||
+++ b/examples/Bye/CMakeLists.txt
|
||||
@@ -14,6 +14,6 @@ if (NOT WIN32)
|
||||
BUILDTREE_ONLY
|
||||
)
|
||||
|
||||
- install(TARGETS ${name} RUNTIME DESTINATION examples)
|
||||
+ install(TARGETS ${name} RUNTIME DESTINATION ${CMAKE_INSTALL_DOCDIR}/examples)
|
||||
set_target_properties(${name} PROPERTIES FOLDER "Examples")
|
||||
endif()
|
||||
diff --git a/include/llvm/CMakeLists.txt b/include/llvm/CMakeLists.txt
|
||||
index b46319f24fc8..2feabd1954e4 100644
|
||||
--- a/include/llvm/CMakeLists.txt
|
||||
+++ b/include/llvm/CMakeLists.txt
|
||||
@@ -5,5 +5,5 @@ add_subdirectory(Frontend)
|
||||
# If we're doing an out-of-tree build, copy a module map for generated
|
||||
# header files into the build area.
|
||||
if (NOT "${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
|
||||
- configure_file(module.modulemap.build module.modulemap COPYONLY)
|
||||
+ configure_file(module.modulemap.build ${LLVM_INCLUDE_DIR}/module.modulemap COPYONLY)
|
||||
endif (NOT "${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
|
||||
diff --git a/tools/llvm-config/BuildVariables.inc.in b/tools/llvm-config/BuildVariables.inc.in
|
||||
index ebe5b73a5c65..70c497be12f5 100644
|
||||
--- a/tools/llvm-config/BuildVariables.inc.in
|
||||
+++ b/tools/llvm-config/BuildVariables.inc.in
|
||||
@@ -23,6 +23,10 @@
|
||||
#define LLVM_CXXFLAGS "@LLVM_CXXFLAGS@"
|
||||
#define LLVM_BUILDMODE "@LLVM_BUILDMODE@"
|
||||
#define LLVM_LIBDIR_SUFFIX "@LLVM_LIBDIR_SUFFIX@"
|
||||
+#define LLVM_INSTALL_BINDIR "@CMAKE_INSTALL_BINDIR@"
|
||||
+#define LLVM_INSTALL_LIBDIR "@CMAKE_INSTALL_LIBDIR@"
|
||||
+#define LLVM_INSTALL_INCLUDEDIR "@CMAKE_INSTALL_INCLUDEDIR@"
|
||||
+#define LLVM_INSTALL_CMAKEDIR "@LLVM_INSTALL_CMAKE_DIR@"
|
||||
#define LLVM_TARGETS_BUILT "@LLVM_TARGETS_BUILT@"
|
||||
#define LLVM_SYSTEM_LIBS "@LLVM_SYSTEM_LIBS@"
|
||||
#define LLVM_BUILD_SYSTEM "@LLVM_BUILD_SYSTEM@"
|
||||
diff --git a/tools/llvm-config/llvm-config.cpp b/tools/llvm-config/llvm-config.cpp
|
||||
index 1a2f04552d13..44fa7d3eec6b 100644
|
||||
--- a/tools/llvm-config/llvm-config.cpp
|
||||
+++ b/tools/llvm-config/llvm-config.cpp
|
||||
@@ -357,12 +357,26 @@ int main(int argc, char **argv) {
|
||||
("-I" + ActiveIncludeDir + " " + "-I" + ActiveObjRoot + "/include");
|
||||
} else {
|
||||
ActivePrefix = CurrentExecPrefix;
|
||||
- ActiveIncludeDir = ActivePrefix + "/include";
|
||||
- SmallString<256> path(StringRef(LLVM_TOOLS_INSTALL_DIR));
|
||||
- sys::fs::make_absolute(ActivePrefix, path);
|
||||
- ActiveBinDir = std::string(path.str());
|
||||
- ActiveLibDir = ActivePrefix + "/lib" + LLVM_LIBDIR_SUFFIX;
|
||||
- ActiveCMakeDir = ActiveLibDir + "/cmake/llvm";
|
||||
+ {
|
||||
+ SmallString<256> path(StringRef(LLVM_INSTALL_INCLUDEDIR));
|
||||
+ sys::fs::make_absolute(ActivePrefix, path);
|
||||
+ ActiveIncludeDir = std::string(path.str());
|
||||
+ }
|
||||
+ {
|
||||
+ SmallString<256> path(StringRef(LLVM_INSTALL_BINDIR));
|
||||
+ sys::fs::make_absolute(ActivePrefix, path);
|
||||
+ ActiveBinDir = std::string(path.str());
|
||||
+ }
|
||||
+ {
|
||||
+ SmallString<256> path(StringRef(LLVM_INSTALL_LIBDIR LLVM_LIBDIR_SUFFIX));
|
||||
+ sys::fs::make_absolute(ActivePrefix, path);
|
||||
+ ActiveLibDir = std::string(path.str());
|
||||
+ }
|
||||
+ {
|
||||
+ SmallString<256> path(StringRef(LLVM_INSTALL_CMAKEDIR));
|
||||
+ sys::fs::make_absolute(ActivePrefix, path);
|
||||
+ ActiveCMakeDir = std::string(path.str());
|
||||
+ }
|
||||
ActiveIncludeOption = "-I" + ActiveIncludeDir;
|
||||
}
|
||||
|
||||
diff --git a/tools/lto/CMakeLists.txt b/tools/lto/CMakeLists.txt
|
||||
index 2963f97cad88..69d66c9c9ca1 100644
|
||||
--- a/tools/lto/CMakeLists.txt
|
||||
+++ b/tools/lto/CMakeLists.txt
|
||||
@@ -25,7 +25,7 @@ add_llvm_library(LTO SHARED INSTALL_WITH_TOOLCHAIN ${SOURCES} DEPENDS
|
||||
intrinsics_gen)
|
||||
|
||||
install(FILES ${LLVM_MAIN_INCLUDE_DIR}/llvm-c/lto.h
|
||||
- DESTINATION include/llvm-c
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm-c
|
||||
COMPONENT LTO)
|
||||
|
||||
if (APPLE)
|
||||
diff --git a/tools/opt-viewer/CMakeLists.txt b/tools/opt-viewer/CMakeLists.txt
|
||||
index ead73ec13a8f..250362021f17 100644
|
||||
--- a/tools/opt-viewer/CMakeLists.txt
|
||||
+++ b/tools/opt-viewer/CMakeLists.txt
|
||||
@@ -8,7 +8,7 @@ set (files
|
||||
|
||||
foreach (file ${files})
|
||||
install(PROGRAMS ${file}
|
||||
- DESTINATION share/opt-viewer
|
||||
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/opt-viewer
|
||||
COMPONENT opt-viewer)
|
||||
endforeach (file)
|
||||
|
||||
diff --git a/tools/remarks-shlib/CMakeLists.txt b/tools/remarks-shlib/CMakeLists.txt
|
||||
index 865436247270..ce1daa62f6ab 100644
|
||||
--- a/tools/remarks-shlib/CMakeLists.txt
|
||||
+++ b/tools/remarks-shlib/CMakeLists.txt
|
||||
@@ -19,7 +19,7 @@ if(LLVM_ENABLE_PIC)
|
||||
endif()
|
||||
|
||||
install(FILES ${LLVM_MAIN_INCLUDE_DIR}/llvm-c/Remarks.h
|
||||
- DESTINATION include/llvm-c
|
||||
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm-c
|
||||
COMPONENT Remarks)
|
||||
|
||||
if (APPLE)
|
|
@ -0,0 +1,28 @@
|
|||
diff --git a/lib/Target/RISCV/MCTargetDesc/RISCVBaseInfo.cpp b/lib/Target/RISCV/MCTargetDesc/RISCVBaseInfo.cpp
|
||||
index 0aba18b20..9bb75e7f4 100644
|
||||
--- a/lib/Target/RISCV/MCTargetDesc/RISCVBaseInfo.cpp
|
||||
+++ b/lib/Target/RISCV/MCTargetDesc/RISCVBaseInfo.cpp
|
||||
@@ -33,6 +33,8 @@ ABI computeTargetABI(const Triple &TT, FeatureBitset FeatureBits,
|
||||
auto TargetABI = getTargetABI(ABIName);
|
||||
bool IsRV64 = TT.isArch64Bit();
|
||||
bool IsRV32E = FeatureBits[RISCV::FeatureRV32E];
|
||||
+ bool IsRV32D = FeatureBits[RISCV::FeatureStdExtD];
|
||||
+ bool IsRV32F = FeatureBits[RISCV::FeatureStdExtF];
|
||||
|
||||
if (!ABIName.empty() && TargetABI == ABI_Unknown) {
|
||||
errs()
|
||||
@@ -56,10 +58,10 @@ ABI computeTargetABI(const Triple &TT, FeatureBitset FeatureBits,
|
||||
if (TargetABI != ABI_Unknown)
|
||||
return TargetABI;
|
||||
|
||||
- // For now, default to the ilp32/ilp32e/lp64 ABI if no explicit ABI is given
|
||||
- // or an invalid/unrecognised string is given. In the future, it might be
|
||||
- // worth changing this to default to ilp32f/lp64f and ilp32d/lp64d when
|
||||
- // hardware support for floating point is present.
|
||||
+ if (IsRV32D)
|
||||
+ return ABI_ILP32D;
|
||||
+ if (IsRV32F)
|
||||
+ return ABI_ILP32F;
|
||||
if (IsRV32E)
|
||||
return ABI_ILP32E;
|
||||
if (IsRV64)
|
|
@ -0,0 +1,16 @@
|
|||
diff --git a/tools/llvm-config/llvm-config.cpp b/tools/llvm-config/llvm-config.cpp
|
||||
index 94d426b..37f7794 100644
|
||||
--- a/tools/llvm-config/llvm-config.cpp
|
||||
+++ b/tools/llvm-config/llvm-config.cpp
|
||||
@@ -333,6 +333,11 @@ int main(int argc, char **argv) {
|
||||
ActiveIncludeOption = "-I" + ActiveIncludeDir;
|
||||
}
|
||||
|
||||
+ /// Nix-specific multiple-output handling: override ActiveLibDir
|
||||
+ if (!IsInDevelopmentTree) {
|
||||
+ ActiveLibDir = std::string("@lib@") + "/lib" + LLVM_LIBDIR_SUFFIX;
|
||||
+ }
|
||||
+
|
||||
/// We only use `shared library` mode in cases where the static library form
|
||||
/// of the components provided are not available; note however that this is
|
||||
/// skipped if we're run from within the build dir. However, once installed,
|
|
nac3artiq/Cargo.toml
@@ -10,8 +10,13 @@ crate-type = ["cdylib"]

[dependencies]
pyo3 = { version = "0.14", features = ["extension-module"] }
-inkwell = { git = "https://github.com/TheDan64/inkwell", branch = "master", features = ["llvm12-0"] }
parking_lot = "0.11"
tempfile = "3"
nac3parser = { path = "../nac3parser" }
nac3core = { path = "../nac3core" }
+
+[dependencies.inkwell]
+git = "https://github.com/TheDan64/inkwell"
+branch = "master"
+default-features = false
+features = ["llvm12-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]

min_artiq.py (nac3artiq demo)
@@ -3,19 +3,44 @@ from functools import wraps
from types import SimpleNamespace
from numpy import int32, int64
from typing import Generic, TypeVar
from math import floor, ceil

import nac3artiq

__all__ = ["KernelInvariant", "extern", "kernel", "portable", "nac3",
           "ms", "us", "ns",
           "print_int32", "print_int64",
           "Core", "TTLOut", "parallel", "sequential"]

__all__ = [
    "Kernel", "KernelInvariant", "virtual",
    "round64", "floor64", "ceil64",
    "extern", "kernel", "portable", "nac3",
    "ms", "us", "ns",
    "print_int32", "print_int64",
    "Core", "TTLOut",
    "parallel", "sequential"
]


T = TypeVar('T')

class Kernel(Generic[T]):
    pass

class KernelInvariant(Generic[T]):
    pass

# The virtual class must exist before nac3artiq.NAC3 is created.
class virtual(Generic[T]):
    pass


def round64(x):
    return round(x)

def floor64(x):
    return floor(x)

def ceil64(x):
    return ceil(x)


import device_db
core_arguments = device_db.device_db["core"]["arguments"]
@ -1,26 +1,28 @@
|
|||
use std::collections::{HashMap, HashSet};
|
||||
use std::fs;
|
||||
use std::process::Command;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use inkwell::{
|
||||
memory_buffer::MemoryBuffer,
|
||||
passes::{PassManager, PassManagerBuilder},
|
||||
targets::*,
|
||||
OptimizationLevel,
|
||||
};
|
||||
use nac3parser::{
|
||||
ast::{self, StrRef},
|
||||
ast::{self, Stmt, StrRef},
|
||||
parser::{self, parse_program},
|
||||
};
|
||||
use pyo3::prelude::*;
|
||||
use pyo3::{exceptions, types::PyBytes, types::PyList, types::PySet};
|
||||
use pyo3::{exceptions, types::PyBytes, types::PyDict, types::PySet};
|
||||
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
|
||||
use nac3core::{
|
||||
codegen::{concrete_type::ConcreteTypeStore, CodeGenTask, WithCall, WorkerRegistry},
|
||||
symbol_resolver::SymbolResolver,
|
||||
toplevel::{composer::TopLevelComposer, DefinitionId, GenCall, TopLevelContext, TopLevelDef},
|
||||
toplevel::{composer::{TopLevelComposer, ComposerConfig}, DefinitionId, GenCall, TopLevelDef},
|
||||
typecheck::typedef::{FunSignature, FuncArg},
|
||||
typecheck::{type_inferencer::PrimitiveStore, typedef::Type},
|
||||
};
|
||||
|
@ -55,8 +57,14 @@ pub struct PrimitivePythonId {
|
|||
bool: u64,
|
||||
list: u64,
|
||||
tuple: u64,
|
||||
typevar: u64,
|
||||
none: u64,
|
||||
generic_alias: (u64, u64),
|
||||
virtual_id: u64,
|
||||
}
|
||||
|
||||
type TopLevelComponent = (Stmt, String, PyObject);
|
||||
|
||||
// TopLevelComposer is unsendable as it holds the unification table, which is
|
||||
// unsendable due to Rc. Arc would cause a performance hit.
|
||||
#[pyclass(unsendable, name = "NAC3")]
|
||||
|
@ -64,15 +72,15 @@ struct Nac3 {
|
|||
isa: Isa,
|
||||
time_fns: &'static (dyn TimeFns + Sync),
|
||||
primitive: PrimitiveStore,
|
||||
builtins: Vec<(StrRef, FunSignature, Arc<GenCall>)>,
|
||||
builtins_ty: HashMap<StrRef, Type>,
|
||||
builtins_def: HashMap<StrRef, DefinitionId>,
|
||||
pyid_to_def: Arc<RwLock<HashMap<u64, DefinitionId>>>,
|
||||
pyid_to_type: Arc<RwLock<HashMap<u64, Type>>>,
|
||||
composer: TopLevelComposer,
|
||||
top_level: Option<Arc<TopLevelContext>>,
|
||||
primitive_ids: PrimitivePythonId,
|
||||
global_value_ids: Arc<Mutex<HashSet<u64>>>,
|
||||
global_value_ids: Arc<RwLock<HashSet<u64>>>,
|
||||
working_directory: TempDir,
|
||||
top_levels: Vec<TopLevelComponent>,
|
||||
}
|
||||
|
||||
impl Nac3 {
|
||||
|
@ -81,32 +89,13 @@ impl Nac3 {
|
|||
module: PyObject,
|
||||
registered_class_ids: &HashSet<u64>,
|
||||
) -> PyResult<()> {
|
||||
let mut name_to_pyid: HashMap<StrRef, u64> = HashMap::new();
|
||||
let (module_name, source_file, helper) =
|
||||
Python::with_gil(|py| -> PyResult<(String, String, PythonHelper)> {
|
||||
let module: &PyAny = module.extract(py)?;
|
||||
let builtins = PyModule::import(py, "builtins")?;
|
||||
let id_fn = builtins.getattr("id")?;
|
||||
let members: &PyList = PyModule::import(py, "inspect")?
|
||||
.getattr("getmembers")?
|
||||
.call1((module,))?
|
||||
.cast_as()?;
|
||||
for member in members.iter() {
|
||||
let key: &str = member.get_item(0)?.extract()?;
|
||||
let val = id_fn.call1((member.get_item(1)?,))?.extract()?;
|
||||
name_to_pyid.insert(key.into(), val);
|
||||
}
|
||||
let helper = PythonHelper {
|
||||
id_fn: builtins.getattr("id").unwrap().to_object(py),
|
||||
len_fn: builtins.getattr("len").unwrap().to_object(py),
|
||||
type_fn: builtins.getattr("type").unwrap().to_object(py),
|
||||
};
|
||||
Ok((
|
||||
module.getattr("__name__")?.extract()?,
|
||||
module.getattr("__file__")?.extract()?,
|
||||
helper,
|
||||
))
|
||||
})?;
|
||||
let (module_name, source_file) = Python::with_gil(|py| -> PyResult<(String, String)> {
|
||||
let module: &PyAny = module.extract(py)?;
|
||||
Ok((
|
||||
module.getattr("__name__")?.extract()?,
|
||||
module.getattr("__file__")?.extract()?,
|
||||
))
|
||||
})?;
|
||||
|
||||
let source = fs::read_to_string(source_file).map_err(|e| {
|
||||
exceptions::PyIOError::new_err(format!("failed to read input file: {}", e))
|
||||
|
@ -114,21 +103,6 @@ impl Nac3 {
|
|||
let parser_result = parser::parse_program(&source)
|
||||
.map_err(|e| exceptions::PySyntaxError::new_err(format!("parse error: {}", e)))?;
|
||||
|
||||
let resolver = Arc::new(Resolver(Arc::new(InnerResolver {
|
||||
id_to_type: self.builtins_ty.clone().into(),
|
||||
id_to_def: self.builtins_def.clone().into(),
|
||||
pyid_to_def: self.pyid_to_def.clone(),
|
||||
pyid_to_type: self.pyid_to_type.clone(),
|
||||
primitive_ids: self.primitive_ids.clone(),
|
||||
global_value_ids: self.global_value_ids.clone(),
|
||||
class_names: Default::default(),
|
||||
name_to_pyid: name_to_pyid.clone(),
|
||||
module: module.clone(),
|
||||
helper,
|
||||
}))) as Arc<dyn SymbolResolver + Send + Sync>;
|
||||
let mut name_to_def = HashMap::new();
|
||||
let mut name_to_type = HashMap::new();
|
||||
|
||||
for mut stmt in parser_result.into_iter() {
|
||||
let include = match stmt.node {
|
||||
ast::StmtKind::ClassDef {
|
||||
|
@@ -194,24 +168,10 @@ impl Nac3 {
|
|||
};
|
||||
|
||||
if include {
|
||||
let (name, def_id, ty) = self
|
||||
.composer
|
||||
.register_top_level(stmt, Some(resolver.clone()), module_name.clone())
|
||||
.map_err(|e| exceptions::PyRuntimeError::new_err(format!("nac3 compilation failure: {}", e)))?;
|
||||
name_to_def.insert(name, def_id);
|
||||
if let Some(ty) = ty {
|
||||
name_to_type.insert(name, ty);
|
||||
}
|
||||
self.top_levels
|
||||
.push((stmt, module_name.clone(), module.clone()));
|
||||
}
|
||||
}
|
||||
let mut map = self.pyid_to_def.write();
|
||||
for (name, def) in name_to_def.into_iter() {
|
||||
map.insert(*name_to_pyid.get(&name).unwrap(), def);
|
||||
}
|
||||
let mut map = self.pyid_to_type.write();
|
||||
for (name, ty) in name_to_type.into_iter() {
|
||||
map.insert(*name_to_pyid.get(&name).unwrap(), ty);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
@@ -279,12 +239,50 @@ impl Nac3 {
|
|||
}))),
|
||||
),
|
||||
];
|
||||
let (composer, builtins_def, builtins_ty) = TopLevelComposer::new(builtins);
|
||||
let (_, builtins_def, builtins_ty) = TopLevelComposer::new(builtins.clone(), ComposerConfig {
|
||||
kernel_ann: Some("Kernel"),
|
||||
kernel_invariant_ann: "KernelInvariant"
|
||||
});
|
||||
|
||||
let builtins_mod = PyModule::import(py, "builtins").unwrap();
|
||||
let id_fn = builtins_mod.getattr("id").unwrap();
|
||||
let numpy_mod = PyModule::import(py, "numpy").unwrap();
|
||||
let typing_mod = PyModule::import(py, "typing").unwrap();
|
||||
let types_mod = PyModule::import(py, "types").unwrap();
|
||||
let primitive_ids = PrimitivePythonId {
|
||||
virtual_id: id_fn
|
||||
.call1((builtins_mod
|
||||
.getattr("globals")
|
||||
.unwrap()
|
||||
.call0()
|
||||
.unwrap()
|
||||
.get_item("virtual")
|
||||
.unwrap(),))
|
||||
.unwrap()
|
||||
.extract()
|
||||
.unwrap(),
|
||||
generic_alias: (
|
||||
id_fn
|
||||
.call1((typing_mod.getattr("_GenericAlias").unwrap(),))
|
||||
.unwrap()
|
||||
.extract()
|
||||
.unwrap(),
|
||||
id_fn
|
||||
.call1((types_mod.getattr("GenericAlias").unwrap(),))
|
||||
.unwrap()
|
||||
.extract()
|
||||
.unwrap(),
|
||||
),
|
||||
none: id_fn
|
||||
.call1((builtins_mod.getattr("None").unwrap(),))
|
||||
.unwrap()
|
||||
.extract()
|
||||
.unwrap(),
|
||||
typevar: id_fn
|
||||
.call1((typing_mod.getattr("TypeVar").unwrap(),))
|
||||
.unwrap()
|
||||
.extract()
|
||||
.unwrap(),
|
||||
int: id_fn
|
||||
.call1((builtins_mod.getattr("int").unwrap(),))
|
||||
.unwrap()
|
||||
|
@ -333,11 +331,11 @@ impl Nac3 {
|
|||
isa,
|
||||
time_fns,
|
||||
primitive,
|
||||
builtins,
|
||||
builtins_ty,
|
||||
builtins_def,
|
||||
composer,
|
||||
primitive_ids,
|
||||
top_level: None,
|
||||
top_levels: Default::default(),
|
||||
pyid_to_def: Default::default(),
|
||||
pyid_to_type: Default::default(),
|
||||
global_value_ids: Default::default(),
|
||||
|
@@ -380,6 +378,83 @@ impl Nac3 {
|
|||
filename: &str,
|
||||
py: Python,
|
||||
) -> PyResult<()> {
|
||||
let (mut composer, _, _) = TopLevelComposer::new(self.builtins.clone(), ComposerConfig {
|
||||
kernel_ann: Some("Kernel"),
|
||||
kernel_invariant_ann: "KernelInvariant"
|
||||
});
|
||||
let mut id_to_def = HashMap::new();
|
||||
let mut id_to_type = HashMap::new();
|
||||
|
||||
let builtins = PyModule::import(py, "builtins")?;
|
||||
let typings = PyModule::import(py, "typing")?;
|
||||
let id_fn = builtins.getattr("id")?;
|
||||
let helper = PythonHelper {
|
||||
id_fn: builtins.getattr("id").unwrap().to_object(py),
|
||||
len_fn: builtins.getattr("len").unwrap().to_object(py),
|
||||
type_fn: builtins.getattr("type").unwrap().to_object(py),
|
||||
origin_ty_fn: typings.getattr("get_origin").unwrap().to_object(py),
|
||||
args_ty_fn: typings.getattr("get_args").unwrap().to_object(py),
|
||||
};
|
||||
let mut module_to_resolver_cache: HashMap<u64, _> = HashMap::new();
|
||||
|
||||
for (stmt, path, module) in self.top_levels.iter() {
|
||||
let py_module: &PyAny = module.extract(py)?;
|
||||
let module_id: u64 = id_fn.call1((py_module,))?.extract()?;
|
||||
let helper = helper.clone();
|
||||
let (name_to_pyid, resolver) = module_to_resolver_cache
|
||||
.get(&module_id)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| {
|
||||
let mut name_to_pyid: HashMap<StrRef, u64> = HashMap::new();
|
||||
let members: &PyDict =
|
||||
py_module.getattr("__dict__").unwrap().cast_as().unwrap();
|
||||
for (key, val) in members.iter() {
|
||||
let key: &str = key.extract().unwrap();
|
||||
let val = id_fn.call1((val,)).unwrap().extract().unwrap();
|
||||
name_to_pyid.insert(key.into(), val);
|
||||
}
|
||||
let resolver = Arc::new(Resolver(Arc::new(InnerResolver {
|
||||
id_to_type: self.builtins_ty.clone().into(),
|
||||
id_to_def: self.builtins_def.clone().into(),
|
||||
pyid_to_def: self.pyid_to_def.clone(),
|
||||
pyid_to_type: self.pyid_to_type.clone(),
|
||||
primitive_ids: self.primitive_ids.clone(),
|
||||
global_value_ids: self.global_value_ids.clone(),
|
||||
class_names: Default::default(),
|
||||
name_to_pyid: name_to_pyid.clone(),
|
||||
module: module.clone(),
|
||||
id_to_pyval: Default::default(),
|
||||
id_to_primitive: Default::default(),
|
||||
field_to_val: Default::default(),
|
||||
helper,
|
||||
})))
|
||||
as Arc<dyn SymbolResolver + Send + Sync>;
|
||||
let name_to_pyid = Rc::new(name_to_pyid);
|
||||
module_to_resolver_cache
|
||||
.insert(module_id, (name_to_pyid.clone(), resolver.clone()));
|
||||
(name_to_pyid, resolver)
|
||||
});
|
||||
|
||||
let (name, def_id, ty) = composer
|
||||
.register_top_level(stmt.clone(), Some(resolver.clone()), path.clone())
|
||||
.map_err(|e| exceptions::PyRuntimeError::new_err(format!("nac3 compilation failure: {}", e)))?;
|
||||
let id = *name_to_pyid.get(&name).unwrap();
|
||||
id_to_def.insert(id, def_id);
|
||||
if let Some(ty) = ty {
|
||||
id_to_type.insert(id, ty);
|
||||
}
|
||||
}
|
||||
{
|
||||
let mut map = self.pyid_to_def.write();
|
||||
for (id, def) in id_to_def.into_iter() {
|
||||
map.insert(id, def);
|
||||
}
|
||||
let mut map = self.pyid_to_type.write();
|
||||
for (id, ty) in id_to_type.into_iter() {
|
||||
map.insert(id, ty);
|
||||
}
|
||||
}
|
||||
|
||||
let id_fun = PyModule::import(py, "builtins")?.getattr("id")?;
|
||||
let mut name_to_pyid: HashMap<StrRef, u64> = HashMap::new();
|
||||
let module = PyModule::new(py, "tmp")?;
|
||||
|
@@ -402,12 +477,6 @@ impl Nac3 {
|
|||
)
|
||||
};
|
||||
let mut synthesized = parse_program(&synthesized).unwrap();
|
||||
let builtins = PyModule::import(py, "builtins")?;
|
||||
let helper = PythonHelper {
|
||||
id_fn: builtins.getattr("id").unwrap().to_object(py),
|
||||
len_fn: builtins.getattr("len").unwrap().to_object(py),
|
||||
type_fn: builtins.getattr("type").unwrap().to_object(py),
|
||||
};
|
||||
let resolver = Arc::new(Resolver(Arc::new(InnerResolver {
|
||||
id_to_type: self.builtins_ty.clone().into(),
|
||||
id_to_def: self.builtins_def.clone().into(),
|
||||
|
@@ -416,12 +485,14 @@ impl Nac3 {
|
|||
primitive_ids: self.primitive_ids.clone(),
|
||||
global_value_ids: self.global_value_ids.clone(),
|
||||
class_names: Default::default(),
|
||||
id_to_pyval: Default::default(),
|
||||
id_to_primitive: Default::default(),
|
||||
field_to_val: Default::default(),
|
||||
name_to_pyid,
|
||||
module: module.to_object(py),
|
||||
helper,
|
||||
}))) as Arc<dyn SymbolResolver + Send + Sync>;
|
||||
let (_, def_id, _) = self
|
||||
.composer
|
||||
let (_, def_id, _) = composer
|
||||
.register_top_level(
|
||||
synthesized.pop().unwrap(),
|
||||
Some(resolver.clone()),
|
||||
|
@@ -437,18 +508,17 @@ impl Nac3 {
|
|||
let mut store = ConcreteTypeStore::new();
|
||||
let mut cache = HashMap::new();
|
||||
let signature = store.from_signature(
|
||||
&mut self.composer.unifier,
|
||||
&mut composer.unifier,
|
||||
&self.primitive,
|
||||
&signature,
|
||||
&mut cache,
|
||||
);
|
||||
let signature = store.add_cty(signature);
|
||||
|
||||
self.composer.start_analysis(true).map_err(|e| exceptions::PyRuntimeError::new_err(format!(
|
||||
composer.start_analysis(true).map_err(|e| exceptions::PyRuntimeError::new_err(format!(
|
||||
"nac3 compilation failure: {}", e
|
||||
)))?;
|
||||
self.top_level = Some(Arc::new(self.composer.make_top_level_context()));
|
||||
let top_level = self.top_level.as_ref().unwrap();
|
||||
let top_level = Arc::new(composer.make_top_level_context());
|
||||
let instance = {
|
||||
let defs = top_level.definitions.read();
|
||||
let mut definition = defs[def_id.0].write();
|
||||
|
@@ -478,52 +548,17 @@ impl Nac3 {
|
|||
};
|
||||
let isa = self.isa;
|
||||
let working_directory = self.working_directory.path().to_owned();
|
||||
let f = Arc::new(WithCall::new(Box::new(move |module| {
|
||||
let builder = PassManagerBuilder::create();
|
||||
builder.set_optimization_level(OptimizationLevel::Default);
|
||||
let passes = PassManager::create(());
|
||||
builder.populate_module_pass_manager(&passes);
|
||||
passes.run_on(module);
|
||||
|
||||
let (triple, features) = match isa {
|
||||
Isa::Host => (
|
||||
TargetMachine::get_default_triple(),
|
||||
TargetMachine::get_host_cpu_features().to_string(),
|
||||
),
|
||||
Isa::RiscV32G => (
|
||||
TargetTriple::create("riscv32-unknown-linux"),
|
||||
"+a,+m,+f,+d".to_string(),
|
||||
),
|
||||
Isa::RiscV32IMA => (
|
||||
TargetTriple::create("riscv32-unknown-linux"),
|
||||
"+a,+m".to_string(),
|
||||
),
|
||||
Isa::CortexA9 => (
|
||||
TargetTriple::create("armv7-unknown-linux-gnueabihf"),
|
||||
"+dsp,+fp16,+neon,+vfp3".to_string(),
|
||||
),
|
||||
};
|
||||
let target =
|
||||
Target::from_triple(&triple).expect("couldn't create target from target triple");
|
||||
let target_machine = target
|
||||
.create_target_machine(
|
||||
&triple,
|
||||
"",
|
||||
&features,
|
||||
OptimizationLevel::Default,
|
||||
RelocMode::PIC,
|
||||
CodeModel::Default,
|
||||
)
|
||||
.expect("couldn't create target machine");
|
||||
target_machine
|
||||
.write_to_file(
|
||||
module,
|
||||
FileType::Object,
|
||||
&working_directory.join(&format!("{}.o", module.get_name().to_str().unwrap())),
|
||||
)
|
||||
.expect("couldn't write module to file");
|
||||
let membuffers: Arc<Mutex<Vec<Vec<u8>>>> = Default::default();
|
||||
|
||||
let membuffer = membuffers.clone();
|
||||
|
||||
let f = Arc::new(WithCall::new(Box::new(move |module| {
|
||||
let buffer = module.write_bitcode_to_memory();
|
||||
let buffer = buffer.as_slice().into();
|
||||
membuffer.lock().push(buffer);
|
||||
})));
|
||||
let thread_names: Vec<String> = (0..4).map(|i| format!("module{}", i)).collect();
|
||||
let thread_names: Vec<String> = (0..4).map(|_| "main".to_string()).collect();
|
||||
let threads: Vec<_> = thread_names
|
||||
.iter()
|
||||
.map(|s| Box::new(ArtiqCodeGenerator::new(s.to_string(), self.time_fns)))
|
||||
|
@@ -535,12 +570,79 @@ impl Nac3 {
|
|||
registry.wait_tasks_complete(handles);
|
||||
});
|
||||
|
||||
let buffers = membuffers.lock();
|
||||
let context = inkwell::context::Context::create();
|
||||
let main = context
|
||||
.create_module_from_ir(MemoryBuffer::create_from_memory_range(&buffers[0], "main"))
|
||||
.unwrap();
|
||||
for buffer in buffers.iter().skip(1) {
|
||||
let other = context
|
||||
.create_module_from_ir(MemoryBuffer::create_from_memory_range(buffer, "main"))
|
||||
.unwrap();
|
||||
|
||||
main.link_in_module(other)
|
||||
.map_err(|err| exceptions::PyRuntimeError::new_err(err.to_string()))?;
|
||||
}
|
||||
|
||||
let mut function_iter = main.get_first_function();
|
||||
while let Some(func) = function_iter {
|
||||
if func.count_basic_blocks() > 0 && func.get_name().to_str().unwrap() != "__modinit__" {
|
||||
func.set_linkage(inkwell::module::Linkage::Private);
|
||||
}
|
||||
function_iter = func.get_next_function();
|
||||
}
|
||||
|
||||
let builder = PassManagerBuilder::create();
|
||||
builder.set_optimization_level(OptimizationLevel::Aggressive);
|
||||
let passes = PassManager::create(());
|
||||
builder.set_inliner_with_threshold(255);
|
||||
builder.populate_module_pass_manager(&passes);
|
||||
passes.run_on(&main);
|
||||
|
||||
let (triple, features) = match isa {
|
||||
Isa::Host => (
|
||||
TargetMachine::get_default_triple(),
|
||||
TargetMachine::get_host_cpu_features().to_string(),
|
||||
),
|
||||
Isa::RiscV32G => (
|
||||
TargetTriple::create("riscv32-unknown-linux"),
|
||||
"+a,+m,+f,+d".to_string(),
|
||||
),
|
||||
Isa::RiscV32IMA => (
|
||||
TargetTriple::create("riscv32-unknown-linux"),
|
||||
"+a,+m".to_string(),
|
||||
),
|
||||
Isa::CortexA9 => (
|
||||
TargetTriple::create("armv7-unknown-linux-gnueabihf"),
|
||||
"+dsp,+fp16,+neon,+vfp3".to_string(),
|
||||
),
|
||||
};
|
||||
let target =
|
||||
Target::from_triple(&triple).expect("couldn't create target from target triple");
|
||||
let target_machine = target
|
||||
.create_target_machine(
|
||||
&triple,
|
||||
"",
|
||||
&features,
|
||||
OptimizationLevel::Default,
|
||||
RelocMode::PIC,
|
||||
CodeModel::Default,
|
||||
)
|
||||
.expect("couldn't create target machine");
|
||||
target_machine
|
||||
.write_to_file(&main, FileType::Object, &working_directory.join("module.o"))
|
||||
.expect("couldn't write module to file");
|
||||
|
||||
let mut linker_args = vec![
|
||||
"-shared".to_string(),
|
||||
"--eh-frame-hdr".to_string(),
|
||||
"-x".to_string(),
|
||||
"-o".to_string(),
|
||||
filename.to_string(),
|
||||
working_directory
|
||||
.join("module.o")
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
];
|
||||
if isa != Isa::Host {
|
||||
linker_args.push(
|
||||
|
@@ -553,15 +655,7 @@ impl Nac3 {
|
|||
.unwrap(),
|
||||
);
|
||||
}
|
||||
linker_args.extend(thread_names.iter().map(|name| {
|
||||
let name_o = name.to_owned() + ".o";
|
||||
self.working_directory
|
||||
.path()
|
||||
.join(name_o.as_str())
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
}));
|
||||
|
||||
if let Ok(linker_status) = Command::new("ld.lld").args(linker_args).status() {
|
||||
if !linker_status.success() {
|
||||
return Err(exceptions::PyRuntimeError::new_err(
|
||||
|
|
|
@ -12,7 +12,7 @@ use nac3core::{
|
|||
use nac3parser::ast::{self, StrRef};
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use pyo3::{
|
||||
types::{PyList, PyModule, PyTuple},
|
||||
types::{PyDict, PyTuple},
|
||||
PyAny, PyObject, PyResult, Python,
|
||||
};
|
||||
use std::{
|
||||
|
@@ -23,10 +23,20 @@ use std::{
|
|||
|
||||
use crate::PrimitivePythonId;
|
||||
|
||||
pub enum PrimitiveValue {
|
||||
I32(i32),
|
||||
I64(i64),
|
||||
F64(f64),
|
||||
Bool(bool),
|
||||
}
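`PrimitiveValue` is the cache entry type used by `id_to_primitive` below: once a host object has been identified as a plain 32/64-bit integer, float, or bool, its extracted value is stored under the object's `id()` so later kernel references can be lowered straight to LLVM constants without calling back into Python. A toy pure-Python version of the same idea (all names here are illustrative):

```python
# Toy sketch of the id_to_primitive cache: map id(obj) -> extracted value,
# so each host object is inspected at most once.
_id_to_primitive = {}

def cache_primitive(obj):
    key = id(obj)
    if key not in _id_to_primitive:
        if isinstance(obj, bool):          # bool first: bool is a subclass of int
            _id_to_primitive[key] = ("Bool", obj)
        elif isinstance(obj, int):
            # the real resolver distinguishes int32/int64 via the registered type ids
            _id_to_primitive[key] = ("I32", obj)
        elif isinstance(obj, float):
            _id_to_primitive[key] = ("F64", obj)
        else:
            raise TypeError("not a primitive")
    return _id_to_primitive[key]

print(cache_primitive(42))    # ('I32', 42)
print(cache_primitive(2.5))   # ('F64', 2.5)
```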
|
||||
|
||||
pub struct InnerResolver {
|
||||
pub id_to_type: Mutex<HashMap<StrRef, Type>>,
|
||||
pub id_to_def: Mutex<HashMap<StrRef, DefinitionId>>,
|
||||
pub global_value_ids: Arc<Mutex<HashSet<u64>>>,
|
||||
pub id_to_type: RwLock<HashMap<StrRef, Type>>,
|
||||
pub id_to_def: RwLock<HashMap<StrRef, DefinitionId>>,
|
||||
pub id_to_pyval: RwLock<HashMap<StrRef, (u64, PyObject)>>,
|
||||
pub id_to_primitive: RwLock<HashMap<u64, PrimitiveValue>>,
|
||||
pub field_to_val: RwLock<HashMap<(u64, StrRef), Option<(u64, PyObject)>>>,
|
||||
pub global_value_ids: Arc<RwLock<HashSet<u64>>>,
|
||||
pub class_names: Mutex<HashMap<StrRef, Type>>,
|
||||
pub pyid_to_def: Arc<RwLock<HashMap<u64, DefinitionId>>>,
|
||||
pub pyid_to_type: Arc<RwLock<HashMap<u64, Type>>>,
|
||||
|
@@ -39,10 +49,13 @@ pub struct InnerResolver {
|
|||
|
||||
pub struct Resolver(pub Arc<InnerResolver>);
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PythonHelper {
|
||||
pub type_fn: PyObject,
|
||||
pub len_fn: PyObject,
|
||||
pub id_fn: PyObject,
|
||||
pub origin_ty_fn: PyObject,
|
||||
pub args_ty_fn: PyObject,
|
||||
}
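The two new fields are bound to `typing.get_origin` and `typing.get_args` where the helper is constructed; they are what the resolver uses to take a parameterised annotation apart. For reference, this is standard-library behaviour (Python 3.9+ for the `list[int]` spelling):

```python
import typing

# get_origin strips the parameters from a generic alias, get_args returns them.
assert typing.get_origin(list[int]) is list
assert typing.get_args(list[int]) == (int,)
assert typing.get_args(tuple[int, bool]) == (int, bool)

# For a plain, non-generic class there is nothing to extract:
assert typing.get_origin(int) is None
assert typing.get_args(int) == ()
```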
|
||||
|
||||
struct PythonValue {
|
||||
|
@ -60,6 +73,20 @@ impl StaticValue for PythonValue {
|
|||
&self,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
) -> BasicValueEnum<'ctx> {
|
||||
if let Some(val) = self.resolver.id_to_primitive.read().get(&self.id) {
|
||||
return match val {
|
||||
PrimitiveValue::I32(val) => ctx.ctx.i32_type().const_int(*val as u64, false).into(),
|
||||
PrimitiveValue::I64(val) => ctx.ctx.i64_type().const_int(*val as u64, false).into(),
|
||||
PrimitiveValue::F64(val) => ctx.ctx.f64_type().const_float(*val).into(),
|
||||
PrimitiveValue::Bool(val) => {
|
||||
ctx.ctx.bool_type().const_int(*val as u64, false).into()
|
||||
}
|
||||
};
|
||||
}
|
||||
if let Some(global) = ctx.module.get_global(&self.id.to_string()) {
|
||||
return global.as_pointer_value().into();
|
||||
}
|
||||
|
||||
Python::with_gil(|py| -> PyResult<BasicValueEnum<'ctx>> {
|
||||
self.resolver
|
||||
.get_obj_value(py, self.value.as_ref(py), ctx)
|
||||
|
@ -73,34 +100,48 @@ impl StaticValue for PythonValue {
|
|||
name: StrRef,
|
||||
ctx: &mut CodeGenContext<'ctx, 'a>,
|
||||
) -> Option<ValueEnum<'ctx>> {
|
||||
Python::with_gil(|py| -> PyResult<Option<ValueEnum<'ctx>>> {
|
||||
let helper = &self.resolver.helper;
|
||||
let ty = helper.type_fn.call1(py, (&self.value,))?;
|
||||
let ty_id: u64 = helper.id_fn.call1(py, (ty,))?.extract(py)?;
|
||||
let def_id = { *self.resolver.pyid_to_def.read().get(&ty_id).unwrap() };
|
||||
let mut mutable = true;
|
||||
let defs = ctx.top_level.definitions.read();
|
||||
if let TopLevelDef::Class { fields, .. } = &*defs[def_id.0].read() {
|
||||
for (field_name, _, is_mutable) in fields.iter() {
|
||||
if field_name == &name {
|
||||
mutable = *is_mutable;
|
||||
break;
|
||||
{
|
||||
let field_to_val = self.resolver.field_to_val.read();
|
||||
field_to_val.get(&(self.id, name)).cloned()
|
||||
}
|
||||
.unwrap_or_else(|| {
|
||||
Python::with_gil(|py| -> PyResult<Option<(u64, PyObject)>> {
|
||||
let helper = &self.resolver.helper;
|
||||
let ty = helper.type_fn.call1(py, (&self.value,))?;
|
||||
let ty_id: u64 = helper.id_fn.call1(py, (ty,))?.extract(py)?;
|
||||
let def_id = { *self.resolver.pyid_to_def.read().get(&ty_id).unwrap() };
|
||||
let mut mutable = true;
|
||||
let defs = ctx.top_level.definitions.read();
|
||||
if let TopLevelDef::Class { fields, .. } = &*defs[def_id.0].read() {
|
||||
for (field_name, _, is_mutable) in fields.iter() {
|
||||
if field_name == &name {
|
||||
mutable = *is_mutable;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(if mutable {
|
||||
None
|
||||
} else {
|
||||
let obj = self.value.getattr(py, &name.to_string())?;
|
||||
let id = self.resolver.helper.id_fn.call1(py, (&obj,))?.extract(py)?;
|
||||
Some(ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: obj,
|
||||
resolver: self.resolver.clone(),
|
||||
})))
|
||||
let result = if mutable {
|
||||
None
|
||||
} else {
|
||||
let obj = self.value.getattr(py, &name.to_string())?;
|
||||
let id = self.resolver.helper.id_fn.call1(py, (&obj,))?.extract(py)?;
|
||||
Some((id, obj))
|
||||
};
|
||||
self.resolver
|
||||
.field_to_val
|
||||
.write()
|
||||
.insert((self.id, name), result.clone());
|
||||
Ok(result)
|
||||
})
|
||||
.unwrap()
|
||||
})
|
||||
.map(|(id, obj)| {
|
||||
ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: obj,
|
||||
resolver: self.resolver.clone(),
|
||||
}))
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -133,47 +174,42 @@ impl InnerResolver {
|
|||
}))
|
||||
}
|
||||
|
||||
fn get_obj_type(
|
||||
// Handle Python objects that represent types themselves.
// Primitives and class types are the type objects themselves; check them with `ty_id`.
// TypeVars and GenericAlias objects (e.g. `A[int, bool]`) are checked with `ty_ty_id`.
// The returned `bool` indicates whether the type is already instantiated.
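In host-Python terms, the `ty_id`/`ty_ty_id` split looks roughly like this (using `int` as a stand-in for the registered primitive classes; the variable names are illustrative):

```python
import types
import typing

T = typing.TypeVar("T", int, float)

# A class used directly as an annotation *is* the type object, so the object's
# own id() is what gets compared against the registered ids (ty_id).
registered_int_id = id(int)
assert id(int) == registered_int_id

# TypeVars and parameterised aliases are *instances*, so the id of their type
# is compared instead (ty_ty_id).
assert type(T) is typing.TypeVar
assert type(typing.List[int]) is typing._GenericAlias
assert type(list[int]) is types.GenericAlias   # Python 3.9+
```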
|
||||
fn get_pyty_obj_type(
|
||||
&self,
|
||||
py: Python,
|
||||
obj: &PyAny,
|
||||
pyty: &PyAny,
|
||||
unifier: &mut Unifier,
|
||||
defs: &[Arc<RwLock<TopLevelDef>>],
|
||||
primitives: &PrimitiveStore,
|
||||
) -> PyResult<Option<Type>> {
|
||||
let ty_id: u64 = self
|
||||
) -> PyResult<Result<(Type, bool), String>> {
|
||||
let ty_id: u64 = self.helper.id_fn.call1(py, (pyty,))?.extract(py)?;
|
||||
let ty_ty_id: u64 = self
|
||||
.helper
|
||||
.id_fn
|
||||
.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?
|
||||
.call1(py, (self.helper.type_fn.call1(py, (pyty,))?,))?
|
||||
.extract(py)?;
|
||||
|
||||
if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
|
||||
Ok(Some(primitives.int32))
|
||||
Ok(Ok((primitives.int32, true)))
|
||||
} else if ty_id == self.primitive_ids.int64 {
|
||||
Ok(Some(primitives.int64))
|
||||
Ok(Ok((primitives.int64, true)))
|
||||
} else if ty_id == self.primitive_ids.bool {
|
||||
Ok(Some(primitives.bool))
|
||||
Ok(Ok((primitives.bool, true)))
|
||||
} else if ty_id == self.primitive_ids.float {
|
||||
Ok(Some(primitives.float))
|
||||
Ok(Ok((primitives.float, true)))
|
||||
} else if ty_id == self.primitive_ids.list {
|
||||
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
|
||||
if len == 0 {
|
||||
let var = unifier.get_fresh_var().0;
|
||||
let list = unifier.add_ty(TypeEnum::TList { ty: var });
|
||||
Ok(Some(list))
|
||||
} else {
|
||||
let ty = self.get_list_elem_type(py, obj, len, unifier, defs, primitives)?;
|
||||
Ok(ty.map(|ty| unifier.add_ty(TypeEnum::TList { ty })))
|
||||
}
|
||||
// type var parameters and the concreteness check are not handled here
|
||||
let var = unifier.get_fresh_var().0;
|
||||
let list = unifier.add_ty(TypeEnum::TList { ty: var });
|
||||
Ok(Ok((list, false)))
|
||||
} else if ty_id == self.primitive_ids.tuple {
|
||||
let elements: &PyTuple = obj.cast_as()?;
|
||||
let types: Result<Option<Vec<_>>, _> = elements
|
||||
.iter()
|
||||
.map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))
|
||||
.collect();
|
||||
let types = types?;
|
||||
Ok(types.map(|types| unifier.add_ty(TypeEnum::TTuple { ty: types })))
|
||||
} else if let Some(def_id) = self.pyid_to_def.read().get(&ty_id) {
|
||||
// type var parameters and the concreteness check are not handled here
|
||||
Ok(Ok((unifier.add_ty(TypeEnum::TTuple { ty: vec![] }), false)))
|
||||
} else if let Some(def_id) = self.pyid_to_def.read().get(&ty_id).cloned() {
|
||||
let def = defs[def_id.0].read();
|
||||
if let TopLevelDef::Class {
|
||||
object_id,
|
||||
|
@@ -183,35 +219,305 @@ impl InnerResolver {
|
|||
..
|
||||
} = &*def
|
||||
{
|
||||
let var_map: HashMap<_, _> = type_vars
|
||||
.iter()
|
||||
.map(|var| {
|
||||
(
|
||||
if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*var) {
|
||||
*id
|
||||
} else {
|
||||
unreachable!()
|
||||
},
|
||||
unifier.get_fresh_var().0,
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
let mut fields_ty = HashMap::new();
|
||||
for method in methods.iter() {
|
||||
fields_ty.insert(method.0, (method.1, false));
|
||||
}
|
||||
for field in fields.iter() {
|
||||
let name: String = field.0.into();
|
||||
let field_data = obj.getattr(&name)?;
|
||||
let ty = self
|
||||
.get_obj_type(py, field_data, unifier, defs, primitives)?
|
||||
.unwrap_or(primitives.none);
|
||||
let field_ty = unifier.subst(field.1, &var_map).unwrap_or(field.1);
|
||||
if unifier.unify(ty, field_ty).is_err() {
|
||||
// field type mismatch
|
||||
return Ok(None);
|
||||
// type var parameters, the concreteness check and substitution are not handled here
|
||||
Ok(Ok({
|
||||
let ty = TypeEnum::TObj {
|
||||
obj_id: *object_id,
|
||||
params: RefCell::new({
|
||||
type_vars
|
||||
.iter()
|
||||
.map(|x| {
|
||||
if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*x) {
|
||||
(*id, *x)
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}),
|
||||
fields: RefCell::new({
|
||||
let mut res = methods
|
||||
.iter()
|
||||
.map(|(iden, ty, _)| (*iden, (*ty, false)))
|
||||
.collect::<HashMap<_, _>>();
|
||||
res.extend(fields.clone().into_iter().map(|x| (x.0, (x.1, x.2))));
|
||||
res
|
||||
}),
|
||||
};
|
||||
// also false here; later instantiation uses the Python object to check compatibility
|
||||
(unifier.add_ty(ty), false)
|
||||
}))
|
||||
} else {
|
||||
// only object is supported, functions are not supported
|
||||
unreachable!("function type is not supported, should not be queried")
|
||||
}
|
||||
} else if ty_ty_id == self.primitive_ids.typevar {
|
||||
let constraint_types = {
|
||||
let constraints = pyty.getattr("__constraints__").unwrap();
|
||||
let mut result: Vec<Type> = vec![];
|
||||
for i in 0.. {
|
||||
if let Ok(constr) = constraints.get_item(i) {
|
||||
result.push({
|
||||
match self.get_pyty_obj_type(py, constr, unifier, defs, primitives)? {
|
||||
Ok((ty, _)) => {
|
||||
if unifier.is_concrete(ty, &[]) {
|
||||
ty
|
||||
} else {
|
||||
return Ok(Err(format!(
|
||||
"the {}th constraint of TypeVar `{}` is not concrete",
|
||||
i + 1,
|
||||
pyty.getattr("__name__")?.extract::<String>()?
|
||||
)));
|
||||
}
|
||||
}
|
||||
Err(err) => return Ok(Err(err)),
|
||||
}
|
||||
})
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
result
|
||||
};
|
||||
let res = unifier.get_fresh_var_with_range(&constraint_types).0;
|
||||
Ok(Ok((res, true)))
|
||||
} else if ty_ty_id == self.primitive_ids.generic_alias.0
|
||||
|| ty_ty_id == self.primitive_ids.generic_alias.1
|
||||
{
|
||||
let origin = self.helper.origin_ty_fn.call1(py, (pyty,))?;
|
||||
let args = self.helper.args_ty_fn.call1(py, (pyty,))?;
|
||||
let args: &PyTuple = args.cast_as(py)?;
|
||||
let origin_ty =
|
||||
match self.get_pyty_obj_type(py, origin.as_ref(py), unifier, defs, primitives)? {
|
||||
Ok((ty, false)) => ty,
|
||||
Ok((_, true)) => {
|
||||
return Ok(Err("instantiated type does not take type parameters".into()))
|
||||
}
|
||||
Err(err) => return Ok(Err(err)),
|
||||
};
|
||||
|
||||
match &*unifier.get_ty(origin_ty) {
|
||||
TypeEnum::TList { .. } => {
|
||||
if args.len() == 1 {
|
||||
let ty = match self.get_pyty_obj_type(
|
||||
py,
|
||||
args.get_item(0),
|
||||
unifier,
|
||||
defs,
|
||||
primitives,
|
||||
)? {
|
||||
Ok(ty) => ty,
|
||||
Err(err) => return Ok(Err(err)),
|
||||
};
|
||||
if !unifier.is_concrete(ty.0, &[]) && !ty.1 {
|
||||
panic!("type list should take concrete parameters in type var ranges")
|
||||
}
|
||||
Ok(Ok((unifier.add_ty(TypeEnum::TList { ty: ty.0 }), true)))
|
||||
} else {
|
||||
return Ok(Err(format!(
|
||||
"type list needs exactly 1 type parameters, found {}",
|
||||
args.len()
|
||||
)));
|
||||
}
|
||||
}
|
||||
TypeEnum::TTuple { .. } => {
|
||||
let args = match args
|
||||
.iter()
|
||||
.map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<_>, _>>() {
|
||||
Ok(args) if !args.is_empty() => args
|
||||
.into_iter()
|
||||
.map(|(x, check)| if !unifier.is_concrete(x, &[]) && !check {
|
||||
panic!("type tuple should take concrete parameters in type var ranges")
|
||||
} else {
|
||||
x
|
||||
}
|
||||
)
|
||||
.collect::<Vec<_>>(),
|
||||
Err(err) => return Ok(Err(err)),
|
||||
_ => return Ok(Err("tuple type needs at least 1 type parameters".to_string()))
|
||||
};
|
||||
Ok(Ok((unifier.add_ty(TypeEnum::TTuple { ty: args }), true)))
|
||||
}
|
||||
TypeEnum::TObj { params, obj_id, .. } => {
|
||||
let subst = {
|
||||
let params = &*params.borrow();
|
||||
if params.len() != args.len() {
|
||||
return Ok(Err(format!(
|
||||
"for class #{}, expect {} type parameters, got {}.",
|
||||
obj_id.0,
|
||||
params.len(),
|
||||
args.len(),
|
||||
)));
|
||||
}
|
||||
let args = match args
|
||||
.iter()
|
||||
.map(|x| self.get_pyty_obj_type(py, x, unifier, defs, primitives))
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<_>, _>>() {
|
||||
Ok(args) => args
|
||||
.into_iter()
|
||||
.map(|(x, check)| if !unifier.is_concrete(x, &[]) && !check {
|
||||
panic!("type class should take concrete parameters in type var ranges")
|
||||
} else {
|
||||
x
|
||||
}
|
||||
)
|
||||
.collect::<Vec<_>>(),
|
||||
Err(err) => return Ok(Err(err)),
|
||||
};
|
||||
params
|
||||
.iter()
|
||||
.zip(args.iter())
|
||||
.map(|((id, _), ty)| (*id, *ty))
|
||||
.collect::<HashMap<_, _>>()
|
||||
};
|
||||
Ok(Ok((
|
||||
unifier.subst(origin_ty, &subst).unwrap_or(origin_ty),
|
||||
true,
|
||||
)))
|
||||
}
|
||||
TypeEnum::TVirtual { .. } => {
|
||||
if args.len() == 1 {
|
||||
let ty = match self.get_pyty_obj_type(
|
||||
py,
|
||||
args.get_item(0),
|
||||
unifier,
|
||||
defs,
|
||||
primitives,
|
||||
)? {
|
||||
Ok(ty) => ty,
|
||||
Err(err) => return Ok(Err(err)),
|
||||
};
|
||||
if !unifier.is_concrete(ty.0, &[]) && !ty.1 {
|
||||
panic!(
|
||||
"virtual class should take concrete parameters in type var ranges"
|
||||
)
|
||||
}
|
||||
Ok(Ok((unifier.add_ty(TypeEnum::TVirtual { ty: ty.0 }), true)))
|
||||
} else {
|
||||
return Ok(Err(format!(
|
||||
"virtual class needs exactly 1 type parameters, found {}",
|
||||
args.len()
|
||||
)));
|
||||
}
|
||||
}
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
} else if ty_id == self.primitive_ids.virtual_id {
|
||||
Ok(Ok((
|
||||
{
|
||||
let ty = TypeEnum::TVirtual {
|
||||
ty: unifier.get_fresh_var().0,
|
||||
};
|
||||
unifier.add_ty(ty)
|
||||
},
|
||||
false,
|
||||
)))
|
||||
} else {
|
||||
Ok(Err("unknown type".into()))
|
||||
}
|
||||
}
|
||||
|
||||
fn get_obj_type(
|
||||
&self,
|
||||
py: Python,
|
||||
obj: &PyAny,
|
||||
unifier: &mut Unifier,
|
||||
defs: &[Arc<RwLock<TopLevelDef>>],
|
||||
primitives: &PrimitiveStore,
|
||||
) -> PyResult<Option<Type>> {
|
||||
let ty = self.helper.type_fn.call1(py, (obj,)).unwrap();
|
||||
let (extracted_ty, inst_check) = match self.get_pyty_obj_type(
|
||||
py,
|
||||
{
|
||||
if [
|
||||
self.primitive_ids.typevar,
|
||||
self.primitive_ids.generic_alias.0,
|
||||
self.primitive_ids.generic_alias.1,
|
||||
]
|
||||
.contains(
|
||||
&self
|
||||
.helper
|
||||
.id_fn
|
||||
.call1(py, (ty.clone(),))?
|
||||
.extract::<u64>(py)?,
|
||||
) {
|
||||
obj
|
||||
} else {
|
||||
ty.as_ref(py)
|
||||
}
|
||||
},
|
||||
unifier,
|
||||
defs,
|
||||
primitives,
|
||||
)? {
|
||||
Ok(s) => s,
|
||||
Err(_) => return Ok(None),
|
||||
};
|
||||
return match (&*unifier.get_ty(extracted_ty), inst_check) {
|
||||
// do the instantiation for these three types
|
||||
(TypeEnum::TList { ty }, false) => {
|
||||
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
|
||||
if len == 0 {
|
||||
assert!(matches!(
|
||||
&*unifier.get_ty(extracted_ty),
|
||||
TypeEnum::TVar { meta: nac3core::typecheck::typedef::TypeVarMeta::Generic, range, .. }
|
||||
if range.borrow().is_empty()
|
||||
));
|
||||
Ok(Some(extracted_ty))
|
||||
} else {
|
||||
let actual_ty =
|
||||
self.get_list_elem_type(py, obj, len, unifier, defs, primitives)?;
|
||||
if let Some(actual_ty) = actual_ty {
|
||||
unifier.unify(*ty, actual_ty).unwrap();
|
||||
Ok(Some(extracted_ty))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
(TypeEnum::TTuple { .. }, false) => {
|
||||
let elements: &PyTuple = obj.cast_as()?;
|
||||
let types: Result<Option<Vec<_>>, _> = elements
|
||||
.iter()
|
||||
.map(|elem| self.get_obj_type(py, elem, unifier, defs, primitives))
|
||||
.collect();
|
||||
let types = types?;
|
||||
Ok(types.map(|types| unifier.add_ty(TypeEnum::TTuple { ty: types })))
|
||||
}
|
||||
(TypeEnum::TObj { params, fields, .. }, false) => {
|
||||
let var_map = params
|
||||
.borrow()
|
||||
.iter()
|
||||
.map(|(id_var, ty)| {
|
||||
if let TypeEnum::TVar { id, range, .. } = &*unifier.get_ty(*ty) {
|
||||
assert_eq!(*id, *id_var);
|
||||
(*id, unifier.get_fresh_var_with_range(&range.borrow()).0)
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
// loop through non-function fields of the class to get the instantiated value
|
||||
for field in fields.borrow().iter() {
|
||||
let name: String = (*field.0).into();
|
||||
if let TypeEnum::TFunc(..) = &*unifier.get_ty(field.1 .0) {
|
||||
continue;
|
||||
} else {
|
||||
let field_data = obj.getattr(&name)?;
|
||||
let ty = self
|
||||
.get_obj_type(py, field_data, unifier, defs, primitives)?
|
||||
.unwrap_or(primitives.none);
|
||||
let field_ty = unifier.subst(field.1 .0, &var_map).unwrap_or(field.1 .0);
|
||||
if unifier.unify(ty, field_ty).is_err() {
|
||||
// field type mismatch
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
fields_ty.insert(field.0, (ty, field.2));
|
||||
}
|
||||
for (_, ty) in var_map.iter() {
|
||||
// must be concrete type
|
||||
|
@@ -219,18 +525,14 @@ impl InnerResolver {
|
|||
return Ok(None);
|
||||
}
|
||||
}
|
||||
Ok(Some(unifier.add_ty(TypeEnum::TObj {
|
||||
obj_id: *object_id,
|
||||
fields: RefCell::new(fields_ty),
|
||||
params: RefCell::new(var_map),
|
||||
})))
|
||||
} else {
|
||||
// only object is supported, functions are not supported
|
||||
Ok(None)
|
||||
return Ok(Some(
|
||||
unifier
|
||||
.subst(extracted_ty, &var_map)
|
||||
.unwrap_or(extracted_ty),
|
||||
));
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
_ => Ok(Some(extracted_ty)),
|
||||
};
|
||||
}
|
||||
|
||||
fn get_obj_value<'ctx, 'a>(
|
||||
|
@@ -244,23 +546,40 @@ impl InnerResolver {
|
|||
.id_fn
|
||||
.call1(py, (self.helper.type_fn.call1(py, (obj,))?,))?
|
||||
.extract(py)?;
|
||||
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
if ty_id == self.primitive_ids.int || ty_id == self.primitive_ids.int32 {
|
||||
let val: i32 = obj.extract()?;
|
||||
self.id_to_primitive
|
||||
.write()
|
||||
.insert(id, PrimitiveValue::I32(val));
|
||||
Ok(Some(ctx.ctx.i32_type().const_int(val as u64, false).into()))
|
||||
} else if ty_id == self.primitive_ids.int64 {
|
||||
let val: i64 = obj.extract()?;
|
||||
self.id_to_primitive
|
||||
.write()
|
||||
.insert(id, PrimitiveValue::I64(val));
|
||||
Ok(Some(ctx.ctx.i64_type().const_int(val as u64, false).into()))
|
||||
} else if ty_id == self.primitive_ids.bool {
|
||||
let val: bool = obj.extract()?;
|
||||
self.id_to_primitive
|
||||
.write()
|
||||
.insert(id, PrimitiveValue::Bool(val));
|
||||
Ok(Some(
|
||||
ctx.ctx.bool_type().const_int(val as u64, false).into(),
|
||||
))
|
||||
} else if ty_id == self.primitive_ids.float {
|
||||
let val: f64 = obj.extract()?;
|
||||
self.id_to_primitive
|
||||
.write()
|
||||
.insert(id, PrimitiveValue::F64(val));
|
||||
Ok(Some(ctx.ctx.f64_type().const_float(val).into()))
|
||||
} else if ty_id == self.primitive_ids.list {
|
||||
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let id_str = id.to_string();
|
||||
|
||||
if let Some(global) = ctx.module.get_global(&id_str) {
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
|
||||
let len: usize = self.helper.len_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let ty = if len == 0 {
|
||||
ctx.primitives.int32
|
||||
|
@@ -285,15 +604,14 @@ impl InnerResolver {
|
|||
);
|
||||
|
||||
{
|
||||
let mut global_value_ids = self.global_value_ids.lock();
|
||||
if global_value_ids.contains(&id) {
|
||||
if self.global_value_ids.read().contains(&id) {
|
||||
let global = ctx.module.get_global(&id_str).unwrap_or_else(|| {
|
||||
ctx.module
|
||||
.add_global(arr_ty, Some(AddressSpace::Generic), &id_str)
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
} else {
|
||||
global_value_ids.insert(id);
|
||||
self.global_value_ids.write().insert(id);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -361,8 +679,12 @@ impl InnerResolver {
|
|||
|
||||
Ok(Some(global.as_pointer_value().into()))
|
||||
} else if ty_id == self.primitive_ids.tuple {
|
||||
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let id_str = id.to_string();
|
||||
|
||||
if let Some(global) = ctx.module.get_global(&id_str) {
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
|
||||
let elements: &PyTuple = obj.cast_as()?;
|
||||
let types: Result<Option<Vec<_>>, _> = elements
|
||||
.iter()
|
||||
|
@@ -381,15 +703,14 @@ impl InnerResolver {
|
|||
let ty = ctx.ctx.struct_type(&types, false);
|
||||
|
||||
{
|
||||
let mut global_value_ids = self.global_value_ids.lock();
|
||||
if global_value_ids.contains(&id) {
|
||||
if self.global_value_ids.read().contains(&id) {
|
||||
let global = ctx.module.get_global(&id_str).unwrap_or_else(|| {
|
||||
ctx.module
|
||||
.add_global(ty, Some(AddressSpace::Generic), &id_str)
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
} else {
|
||||
global_value_ids.insert(id);
|
||||
self.global_value_ids.write().insert(id);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -405,8 +726,12 @@ impl InnerResolver {
|
|||
global.set_initializer(&val);
|
||||
Ok(Some(global.as_pointer_value().into()))
|
||||
} else {
|
||||
let id: u64 = self.helper.id_fn.call1(py, (obj,))?.extract(py)?;
|
||||
let id_str = id.to_string();
|
||||
|
||||
if let Some(global) = ctx.module.get_global(&id_str) {
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
}
|
||||
|
||||
let top_level_defs = ctx.top_level.definitions.read();
|
||||
let ty = self
|
||||
.get_obj_type(py, obj, &mut ctx.unifier, &top_level_defs, &ctx.primitives)?
|
||||
|
@@ -417,16 +742,16 @@ impl InnerResolver {
|
|||
.get_element_type()
|
||||
.into_struct_type()
|
||||
.as_basic_type_enum();
|
||||
|
||||
{
|
||||
let mut global_value_ids = self.global_value_ids.lock();
|
||||
if global_value_ids.contains(&id) {
|
||||
if self.global_value_ids.read().contains(&id) {
|
||||
let global = ctx.module.get_global(&id_str).unwrap_or_else(|| {
|
||||
ctx.module
|
||||
.add_global(ty, Some(AddressSpace::Generic), &id_str)
|
||||
});
|
||||
return Ok(Some(global.as_pointer_value().into()));
|
||||
} else {
|
||||
global_value_ids.insert(id);
|
||||
self.global_value_ids.write().insert(id);
|
||||
}
|
||||
}
|
||||
// should be classes
|
||||
|
@@ -505,14 +830,10 @@ impl SymbolResolver for Resolver {
|
|||
ast::ExprKind::Name { id, .. } => {
|
||||
Python::with_gil(|py| -> PyResult<Option<SymbolValue>> {
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let members: &PyList = PyModule::import(py, "inspect")?
|
||||
.getattr("getmembers")?
|
||||
.call1((obj,))?
|
||||
.cast_as()?;
|
||||
let members: &PyDict = obj.getattr("__dict__").unwrap().cast_as().unwrap();
|
||||
let mut sym_value = None;
|
||||
for member in members.iter() {
|
||||
let key: &str = member.get_item(0)?.extract()?;
|
||||
let val = member.get_item(1)?;
|
||||
for (key, val) in members.iter() {
|
||||
let key: &str = key.extract()?;
|
||||
if key == id.to_string() {
|
||||
sym_value = Some(
|
||||
self.0
|
||||
|
@@ -538,38 +859,40 @@ impl SymbolResolver for Resolver {
|
|||
primitives: &PrimitiveStore,
|
||||
str: StrRef,
|
||||
) -> Option<Type> {
|
||||
let mut id_to_type = self.0.id_to_type.lock();
|
||||
id_to_type.get(&str).cloned().or_else(|| {
|
||||
{
|
||||
let id_to_type = self.0.id_to_type.read();
|
||||
id_to_type.get(&str).cloned()
|
||||
}
|
||||
.or_else(|| {
|
||||
let py_id = self.0.name_to_pyid.get(&str);
|
||||
let result = py_id.and_then(|id| {
|
||||
self.0.pyid_to_type.read().get(id).copied().or_else(|| {
|
||||
Python::with_gil(|py| -> PyResult<Option<Type>> {
|
||||
{
|
||||
let pyid_to_type = self.0.pyid_to_type.read();
|
||||
pyid_to_type.get(id).copied()
|
||||
}
|
||||
.or_else(|| {
|
||||
let result = Python::with_gil(|py| -> PyResult<Option<Type>> {
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let members: &PyList = PyModule::import(py, "inspect")?
|
||||
.getattr("getmembers")?
|
||||
.call1((obj,))?
|
||||
.cast_as()?;
|
||||
let mut sym_ty = None;
|
||||
for member in members.iter() {
|
||||
let key: &str = member.get_item(0)?.extract()?;
|
||||
let members: &PyDict = obj.getattr("__dict__").unwrap().cast_as().unwrap();
|
||||
for (key, val) in members.iter() {
|
||||
let key: &str = key.extract()?;
|
||||
if key == str.to_string() {
|
||||
sym_ty = self.0.get_obj_type(
|
||||
py,
|
||||
member.get_item(1)?,
|
||||
unifier,
|
||||
defs,
|
||||
primitives,
|
||||
)?;
|
||||
sym_ty = self.0.get_obj_type(py, val, unifier, defs, primitives)?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(sym_ty)
|
||||
})
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
if let Some(result) = result {
|
||||
self.0.pyid_to_type.write().insert(*id, result);
|
||||
}
|
||||
result
|
||||
})
|
||||
});
|
||||
if let Some(result) = &result {
|
||||
id_to_type.insert(str, *result);
|
||||
self.0.id_to_type.write().insert(str, *result);
|
||||
}
|
||||
result
|
||||
})
|
||||
|
@@ -580,29 +903,37 @@ impl SymbolResolver for Resolver {
|
|||
id: StrRef,
|
||||
_: &mut CodeGenContext<'ctx, 'a>,
|
||||
) -> Option<ValueEnum<'ctx>> {
|
||||
Python::with_gil(|py| -> PyResult<Option<ValueEnum<'ctx>>> {
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let members: &PyList = PyModule::import(py, "inspect")?
|
||||
.getattr("getmembers")?
|
||||
.call1((obj,))?
|
||||
.cast_as()?;
|
||||
let mut sym_value = None;
|
||||
for member in members.iter() {
|
||||
let key: &str = member.get_item(0)?.extract()?;
|
||||
let val = member.get_item(1)?;
|
||||
if key == id.to_string() {
|
||||
let id = self.0.helper.id_fn.call1(py, (val,))?.extract(py)?;
|
||||
sym_value = Some(PythonValue {
|
||||
id,
|
||||
value: val.extract()?,
|
||||
resolver: self.0.clone(),
|
||||
});
|
||||
break;
|
||||
let sym_value = {
|
||||
let id_to_val = self.0.id_to_pyval.read();
|
||||
id_to_val.get(&id).cloned()
|
||||
}
|
||||
.or_else(|| {
|
||||
Python::with_gil(|py| -> PyResult<Option<(u64, PyObject)>> {
|
||||
let obj: &PyAny = self.0.module.extract(py)?;
|
||||
let mut sym_value: Option<(u64, PyObject)> = None;
|
||||
let members: &PyDict = obj.getattr("__dict__").unwrap().cast_as().unwrap();
|
||||
for (key, val) in members.iter() {
|
||||
let key: &str = key.extract()?;
|
||||
if key == id.to_string() {
|
||||
let id = self.0.helper.id_fn.call1(py, (val,))?.extract(py)?;
|
||||
sym_value = Some((id, val.extract()?));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(sym_value.map(|v| ValueEnum::Static(Arc::new(v))))
|
||||
if let Some((pyid, val)) = &sym_value {
|
||||
self.0.id_to_pyval.write().insert(id, (*pyid, val.clone()));
|
||||
}
|
||||
Ok(sym_value)
|
||||
})
|
||||
.unwrap()
|
||||
});
|
||||
sym_value.map(|(id, v)| {
|
||||
ValueEnum::Static(Arc::new(PythonValue {
|
||||
id,
|
||||
value: v,
|
||||
resolver: self.0.clone(),
|
||||
}))
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn get_symbol_location(&self, _: StrRef) -> Option<Location> {
|
||||
|
@@ -610,12 +941,15 @@ impl SymbolResolver for Resolver {
|
|||
}
|
||||
|
||||
fn get_identifier_def(&self, id: StrRef) -> Option<DefinitionId> {
|
||||
let mut id_to_def = self.0.id_to_def.lock();
|
||||
id_to_def.get(&id).cloned().or_else(|| {
|
||||
{
|
||||
let id_to_def = self.0.id_to_def.read();
|
||||
id_to_def.get(&id).cloned()
|
||||
}
|
||||
.or_else(|| {
|
||||
let py_id = self.0.name_to_pyid.get(&id);
|
||||
let result = py_id.and_then(|id| self.0.pyid_to_def.read().get(id).copied());
|
||||
if let Some(result) = &result {
|
||||
id_to_def.insert(id, *result);
|
||||
self.0.id_to_def.write().insert(id, *result);
|
||||
}
|
||||
result
|
||||
})
|
||||
|
|
|
@ -7,13 +7,18 @@ edition = "2018"
|
|||
[dependencies]
|
||||
num-bigint = "0.3"
|
||||
num-traits = "0.2"
|
||||
inkwell = { git = "https://github.com/TheDan64/inkwell", branch = "master", features = ["llvm12-0"] }
|
||||
itertools = "0.10.1"
|
||||
crossbeam = "0.8.1"
|
||||
parking_lot = "0.11.1"
|
||||
rayon = "1.5.1"
|
||||
nac3parser = { path = "../nac3parser" }
|
||||
|
||||
[dependencies.inkwell]
|
||||
git = "https://github.com/TheDan64/inkwell"
|
||||
branch = "master"
|
||||
default-features = false
|
||||
features = ["llvm12-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
|
||||
|
||||
[dev-dependencies]
|
||||
test-case = "1.2.0"
|
||||
indoc = "1.0"
|
||||
|
|
|
@ -323,6 +323,10 @@ pub fn gen_func<'ctx, G: CodeGenerator + ?Sized>(
|
|||
unifier.get_representative(primitives.str),
|
||||
context.i8_type().ptr_type(AddressSpace::Generic).into(),
|
||||
),
|
||||
(
|
||||
unifier.get_representative(primitives.range),
|
||||
context.i32_type().array_type(3).ptr_type(AddressSpace::Generic).into()
|
||||
),
|
||||
]
|
||||
.iter()
|
||||
.cloned()
|
||||
|
|
|
@@ -138,159 +138,165 @@ pub fn parse_type_annotation<T>(
|
|||
let list_id = ids[6];
|
||||
let tuple_id = ids[7];
|
||||
|
||||
match &expr.node {
|
||||
Name { id, .. } => {
|
||||
if *id == int32_id {
|
||||
Ok(primitives.int32)
|
||||
} else if *id == int64_id {
|
||||
Ok(primitives.int64)
|
||||
} else if *id == float_id {
|
||||
Ok(primitives.float)
|
||||
} else if *id == bool_id {
|
||||
Ok(primitives.bool)
|
||||
} else if *id == none_id {
|
||||
Ok(primitives.none)
|
||||
} else {
|
||||
let obj_id = resolver.get_identifier_def(*id);
|
||||
if let Some(obj_id) = obj_id {
|
||||
let def = top_level_defs[obj_id.0].read();
|
||||
if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
|
||||
if !type_vars.is_empty() {
|
||||
return Err(format!(
|
||||
"Unexpected number of type parameters: expected {} but got 0",
|
||||
type_vars.len()
|
||||
));
|
||||
}
|
||||
let fields = RefCell::new(
|
||||
chain(
|
||||
fields.iter().map(|(k, v, m)| (*k, (*v, *m))),
|
||||
methods.iter().map(|(k, v, _)| (*k, (*v, false))),
|
||||
)
|
||||
.collect(),
|
||||
);
|
||||
Ok(unifier.add_ty(TypeEnum::TObj {
|
||||
obj_id,
|
||||
fields,
|
||||
params: Default::default(),
|
||||
}))
|
||||
} else {
|
||||
Err("Cannot use function name as type".into())
|
||||
let name_handling = |id: &StrRef, unifier: &mut Unifier| {
|
||||
if *id == int32_id {
|
||||
Ok(primitives.int32)
|
||||
} else if *id == int64_id {
|
||||
Ok(primitives.int64)
|
||||
} else if *id == float_id {
|
||||
Ok(primitives.float)
|
||||
} else if *id == bool_id {
|
||||
Ok(primitives.bool)
|
||||
} else if *id == none_id {
|
||||
Ok(primitives.none)
|
||||
} else {
|
||||
let obj_id = resolver.get_identifier_def(*id);
|
||||
if let Some(obj_id) = obj_id {
|
||||
let def = top_level_defs[obj_id.0].read();
|
||||
if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
|
||||
if !type_vars.is_empty() {
|
||||
return Err(format!(
|
||||
"Unexpected number of type parameters: expected {} but got 0",
|
||||
type_vars.len()
|
||||
));
|
||||
}
|
||||
let fields = RefCell::new(
|
||||
chain(
|
||||
fields.iter().map(|(k, v, m)| (*k, (*v, *m))),
|
||||
methods.iter().map(|(k, v, _)| (*k, (*v, false))),
|
||||
)
|
||||
.collect(),
|
||||
);
|
||||
Ok(unifier.add_ty(TypeEnum::TObj {
|
||||
obj_id,
|
||||
fields,
|
||||
params: Default::default(),
|
||||
}))
|
||||
} else {
|
||||
// it could be a type variable
|
||||
let ty = resolver
|
||||
.get_symbol_type(unifier, top_level_defs, primitives, *id)
|
||||
.ok_or_else(|| "unknown type variable name".to_owned())?;
|
||||
if let TypeEnum::TVar { .. } = &*unifier.get_ty(ty) {
|
||||
Ok(ty)
|
||||
} else {
|
||||
Err(format!("Unknown type annotation {}", id))
|
||||
}
|
||||
Err("Cannot use function name as type".into())
|
||||
}
|
||||
} else {
|
||||
// it could be a type variable
|
||||
let ty = resolver
|
||||
.get_symbol_type(unifier, top_level_defs, primitives, *id)
|
||||
.ok_or_else(|| "unknown type variable name".to_owned())?;
|
||||
if let TypeEnum::TVar { .. } = &*unifier.get_ty(ty) {
|
||||
Ok(ty)
|
||||
} else {
|
||||
Err(format!("Unknown type annotation {}", id))
|
||||
}
|
||||
}
|
||||
}
|
||||
Subscript { value, slice, .. } => {
|
||||
if let Name { id, .. } = &value.node {
|
||||
if *id == virtual_id {
|
||||
let ty = parse_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
slice,
|
||||
)?;
|
||||
Ok(unifier.add_ty(TypeEnum::TVirtual { ty }))
|
||||
} else if *id == list_id {
|
||||
let ty = parse_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
slice,
|
||||
)?;
|
||||
Ok(unifier.add_ty(TypeEnum::TList { ty }))
|
||||
} else if *id == tuple_id {
|
||||
if let Tuple { elts, .. } = &slice.node {
|
||||
let ty = elts
|
||||
.iter()
|
||||
.map(|elt| {
|
||||
parse_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
elt,
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
Ok(unifier.add_ty(TypeEnum::TTuple { ty }))
|
||||
} else {
|
||||
Err("Expected multiple elements for tuple".into())
|
||||
}
|
||||
} else {
|
||||
let types = if let Tuple { elts, .. } = &slice.node {
|
||||
elts.iter()
|
||||
.map(|v| {
|
||||
parse_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
v,
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
} else {
|
||||
vec![parse_type_annotation(
|
||||
};
|
||||
|
||||
let subscript_name_handle = |id: &StrRef, slice: &Expr<T>, unifier: &mut Unifier| {
|
||||
if *id == virtual_id {
|
||||
let ty = parse_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
slice,
|
||||
)?;
|
||||
Ok(unifier.add_ty(TypeEnum::TVirtual { ty }))
|
||||
} else if *id == list_id {
|
||||
let ty = parse_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
slice,
|
||||
)?;
|
||||
Ok(unifier.add_ty(TypeEnum::TList { ty }))
|
||||
} else if *id == tuple_id {
|
||||
if let Tuple { elts, .. } = &slice.node {
|
||||
let ty = elts
|
||||
.iter()
|
||||
.map(|elt| {
|
||||
parse_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
slice,
|
||||
)?]
|
||||
};
|
||||
elt,
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
Ok(unifier.add_ty(TypeEnum::TTuple { ty }))
|
||||
} else {
|
||||
Err("Expected multiple elements for tuple".into())
|
||||
}
|
||||
} else {
|
||||
let types = if let Tuple { elts, .. } = &slice.node {
|
||||
elts.iter()
|
||||
.map(|v| {
|
||||
parse_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
v,
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
} else {
|
||||
vec![parse_type_annotation(
|
||||
resolver,
|
||||
top_level_defs,
|
||||
unifier,
|
||||
primitives,
|
||||
slice,
|
||||
)?]
|
||||
};
|
||||
|
||||
let obj_id = resolver
|
||||
.get_identifier_def(*id)
|
||||
.ok_or_else(|| format!("Unknown type annotation {}", id))?;
|
||||
let def = top_level_defs[obj_id.0].read();
|
||||
if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
|
||||
if types.len() != type_vars.len() {
|
||||
return Err(format!(
|
||||
"Unexpected number of type parameters: expected {} but got {}",
|
||||
type_vars.len(),
|
||||
types.len()
|
||||
));
|
||||
}
|
||||
let mut subst = HashMap::new();
|
||||
for (var, ty) in izip!(type_vars.iter(), types.iter()) {
|
||||
let id = if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*var) {
|
||||
*id
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
subst.insert(id, *ty);
|
||||
}
|
||||
let mut fields = fields
|
||||
.iter()
|
||||
.map(|(attr, ty, is_mutable)| {
|
||||
let ty = unifier.subst(*ty, &subst).unwrap_or(*ty);
|
||||
(*attr, (ty, *is_mutable))
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
fields.extend(methods.iter().map(|(attr, ty, _)| {
|
||||
let ty = unifier.subst(*ty, &subst).unwrap_or(*ty);
|
||||
(*attr, (ty, false))
|
||||
}));
|
||||
Ok(unifier.add_ty(TypeEnum::TObj {
|
||||
obj_id,
|
||||
fields: fields.into(),
|
||||
params: subst.into(),
|
||||
}))
|
||||
} else {
|
||||
Err("Cannot use function name as type".into())
|
||||
}
|
||||
let obj_id = resolver
|
||||
.get_identifier_def(*id)
|
||||
.ok_or_else(|| format!("Unknown type annotation {}", id))?;
|
||||
let def = top_level_defs[obj_id.0].read();
|
||||
if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
|
||||
if types.len() != type_vars.len() {
|
||||
return Err(format!(
|
||||
"Unexpected number of type parameters: expected {} but got {}",
|
||||
type_vars.len(),
|
||||
types.len()
|
||||
));
|
||||
}
|
||||
let mut subst = HashMap::new();
|
||||
for (var, ty) in izip!(type_vars.iter(), types.iter()) {
|
||||
let id = if let TypeEnum::TVar { id, .. } = &*unifier.get_ty(*var) {
|
||||
*id
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
subst.insert(id, *ty);
|
||||
}
|
||||
let mut fields = fields
|
||||
.iter()
|
||||
.map(|(attr, ty, is_mutable)| {
|
||||
let ty = unifier.subst(*ty, &subst).unwrap_or(*ty);
|
||||
(*attr, (ty, *is_mutable))
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
fields.extend(methods.iter().map(|(attr, ty, _)| {
|
||||
let ty = unifier.subst(*ty, &subst).unwrap_or(*ty);
|
||||
(*attr, (ty, false))
|
||||
}));
|
||||
Ok(unifier.add_ty(TypeEnum::TObj {
|
||||
obj_id,
|
||||
fields: fields.into(),
|
||||
params: subst.into(),
|
||||
}))
|
||||
} else {
|
||||
Err("Cannot use function name as type".into())
|
||||
}
|
||||
}
|
||||
};
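With the two closures in place, the dispatch below covers every annotation form the parser accepts. A kernel-side illustration follows; this is NAC3 kernel code rather than plain Python (`int32` and `virtual` are assumed to be provided by the NAC3/ARTIQ environment, and `Base`/`f` are made-up names):

```python
from typing import TypeVar

T = TypeVar("T")               # Name      -> falls back to the type-variable lookup

class Base:
    pass

def f(a: int32,                # Name      -> primitive
      b: list[int32],          # Subscript -> TList
      c: tuple[int32, bool],   # Subscript -> TTuple
      d: virtual[Base],        # Subscript -> TVirtual
      e: T):                   # Name      -> type variable
    pass
```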
|
||||
|
||||
match &expr.node {
|
||||
Name { id, .. } => name_handling(id, unifier),
|
||||
Subscript { value, slice, .. } => {
|
||||
if let Name { id, .. } = &value.node {
|
||||
subscript_name_handle(id, slice, unifier)
|
||||
} else {
|
||||
Err(format!("unsupported type expression at {}", expr.location))
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,694 @@
|
|||
use std::cell::RefCell;
|
||||
use inkwell::{IntPredicate::{self, *}, FloatPredicate, values::IntValue};
|
||||
use crate::{symbol_resolver::SymbolValue, codegen::expr::destructure_range};
|
||||
use super::*;
|
||||
|
||||
type BuiltinInfo = (
|
||||
Vec<(Arc<RwLock<TopLevelDef>>, Option<Stmt>)>,
|
||||
&'static [&'static str]
|
||||
);
|
||||
|
||||
pub fn get_builtins(primitives: &mut (PrimitiveStore, Unifier)) -> BuiltinInfo {
|
||||
let int32 = primitives.0.int32;
|
||||
let int64 = primitives.0.int64;
|
||||
let float = primitives.0.float;
|
||||
let boolean = primitives.0.bool;
|
||||
let range = primitives.0.range;
|
||||
let string = primitives.0.str;
|
||||
let num_ty = primitives.1.get_fresh_var_with_range(&[int32, int64, float, boolean]);
|
||||
let var_map: HashMap<_, _> = vec![(num_ty.1, num_ty.0)].into_iter().collect();
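`num_ty` is a fresh type variable restricted to {int32, int64, float, bool}; it becomes the parameter type of the conversion builtins defined below, so each of them accepts any of those four types and the generated code picks zero-extension, truncation, or an int/float conversion depending on the concrete argument type. A kernel-side sketch of the resulting behaviour, assuming the usual NAC3 primitive names and that bare integer literals default to int32:

```python
# NAC3 kernel sketch; int32/int64/float here are the NAC3 builtins, not CPython.
a = int32(True)       # bool  -> int32 : zero-extended, yields 1
b = int32(int64(7))   # int64 -> int32 : truncated to the low 32 bits
c = int32(2.9)        # float -> int32 : fptosi, rounds toward zero, yields 2
d = float(3)          # int32 -> float : signed integer to float conversion
```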
|
||||
|
||||
let top_level_def_list = vec![
|
||||
Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(
|
||||
0,
|
||||
None,
|
||||
"int32".into(),
|
||||
None,
|
||||
))),
|
||||
Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(
|
||||
1,
|
||||
None,
|
||||
"int64".into(),
|
||||
None,
|
||||
))),
|
||||
Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(
|
||||
2,
|
||||
None,
|
||||
"float".into(),
|
||||
None,
|
||||
))),
|
||||
Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(3, None, "bool".into(), None))),
|
||||
Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(4, None, "none".into(), None))),
|
||||
Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(
|
||||
5,
|
||||
None,
|
||||
"range".into(),
|
||||
None,
|
||||
))),
|
||||
Arc::new(RwLock::new(TopLevelComposer::make_top_level_class_def(6, None, "str".into(), None))),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "int32".into(),
|
||||
simple_name: "int32".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
|
||||
ret: int32,
|
||||
vars: var_map.clone(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, fun, args| {
|
||||
let int32 = ctx.primitives.int32;
|
||||
let int64 = ctx.primitives.int64;
|
||||
let float = ctx.primitives.float;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1;
|
||||
if ctx.unifier.unioned(arg_ty, boolean) {
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_int_z_extend(
|
||||
arg.into_int_value(),
|
||||
ctx.ctx.i32_type(),
|
||||
"zext",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
} else if ctx.unifier.unioned(arg_ty, int32) {
|
||||
Some(arg)
|
||||
} else if ctx.unifier.unioned(arg_ty, int64) {
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_int_truncate(
|
||||
arg.into_int_value(),
|
||||
ctx.ctx.i32_type(),
|
||||
"trunc",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
} else if ctx.unifier.unioned(arg_ty, float) {
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_float_to_signed_int(
|
||||
arg.into_float_value(),
|
||||
ctx.ctx.i32_type(),
|
||||
"fptosi",
|
||||
)
|
||||
.into();
|
||||
Some(val)
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
)))),
|
||||
})),
|
||||
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "int64".into(),
            simple_name: "int64".into(),
            signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
                args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
                ret: int64,
                vars: var_map.clone(),
            }))),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
            codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, fun, args| {
                let int32 = ctx.primitives.int32;
                let int64 = ctx.primitives.int64;
                let float = ctx.primitives.float;
                let boolean = ctx.primitives.bool;
                let arg_ty = fun.0.args[0].ty;
                let arg = args[0].1;
                if ctx.unifier.unioned(arg_ty, boolean) || ctx.unifier.unioned(arg_ty, int32) {
                    // bool/int32 -> int64: zero-extend to i64
                    Some(ctx.builder.build_int_z_extend(arg.into_int_value(), ctx.ctx.i64_type(), "zext").into())
                } else if ctx.unifier.unioned(arg_ty, int64) {
                    Some(arg)
                } else if ctx.unifier.unioned(arg_ty, float) {
                    Some(ctx.builder.build_float_to_signed_int(arg.into_float_value(), ctx.ctx.i64_type(), "fptosi").into())
                } else {
                    unreachable!()
                }
            })))),
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "float".into(),
            simple_name: "float".into(),
            signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
                args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
                ret: float,
                vars: var_map.clone(),
            }))),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
            codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, fun, args| {
                let int32 = ctx.primitives.int32;
                let int64 = ctx.primitives.int64;
                let boolean = ctx.primitives.bool;
                let float = ctx.primitives.float;
                let arg_ty = fun.0.args[0].ty;
                let arg = args[0].1;
                if ctx.unifier.unioned(arg_ty, boolean)
                    || ctx.unifier.unioned(arg_ty, int32)
                    || ctx.unifier.unioned(arg_ty, int64)
                {
                    let arg = args[0].1.into_int_value();
                    Some(ctx.builder.build_signed_int_to_float(arg, ctx.ctx.f64_type(), "sitofp").into())
                } else if ctx.unifier.unioned(arg_ty, float) {
                    Some(arg)
                } else {
                    unreachable!()
                }
            })))),
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "round".into(),
            simple_name: "round".into(),
            signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
                args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
                ret: int32,
                vars: Default::default(),
            }))),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
            codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
                let arg = args[0].1;
                let round_intrinsic = ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| {
                    let float = ctx.ctx.f64_type();
                    let fn_type = float.fn_type(&[float.into()], false);
                    ctx.module.add_function("llvm.round.f64", fn_type, None)
                });
                let val = ctx
                    .builder
                    .build_call(round_intrinsic, &[arg], "round")
                    .try_as_basic_value()
                    .left()
                    .unwrap();
                Some(ctx.builder.build_float_to_signed_int(val.into_float_value(), ctx.ctx.i32_type(), "fptosi").into())
            })))),
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "round64".into(),
            simple_name: "round64".into(),
            signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
                args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
                ret: int64,
                vars: Default::default(),
            }))),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
            codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
                let arg = args[0].1;
                let round_intrinsic = ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| {
                    let float = ctx.ctx.f64_type();
                    let fn_type = float.fn_type(&[float.into()], false);
                    ctx.module.add_function("llvm.round.f64", fn_type, None)
                });
                let val = ctx
                    .builder
                    .build_call(round_intrinsic, &[arg], "round")
                    .try_as_basic_value()
                    .left()
                    .unwrap();
                Some(ctx.builder.build_float_to_signed_int(val.into_float_value(), ctx.ctx.i64_type(), "fptosi").into())
            })))),
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "range".into(),
            simple_name: "range".into(),
            signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
                args: vec![
                    FuncArg { name: "start".into(), ty: int32, default_value: None },
                    FuncArg {
                        name: "stop".into(),
                        ty: int32,
                        // placeholder
                        default_value: Some(SymbolValue::I32(0)),
                    },
                    FuncArg {
                        name: "step".into(),
                        ty: int32,
                        default_value: Some(SymbolValue::I32(1)),
                    },
                ],
                ret: range,
                vars: Default::default(),
            }))),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
            codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
                let mut start = None;
                let mut stop = None;
                let mut step = None;
                let int32 = ctx.ctx.i32_type();
                let zero = int32.const_zero();
                // match arguments either by keyword or by position
                for (i, arg) in args.iter().enumerate() {
                    if arg.0 == Some("start".into()) {
                        start = Some(arg.1);
                    } else if arg.0 == Some("stop".into()) {
                        stop = Some(arg.1);
                    } else if arg.0 == Some("step".into()) {
                        step = Some(arg.1);
                    } else if i == 0 {
                        start = Some(arg.1);
                    } else if i == 1 {
                        stop = Some(arg.1);
                    } else if i == 2 {
                        step = Some(arg.1);
                    }
                }
                // TODO: error when step == 0
                let step = step.unwrap_or_else(|| int32.const_int(1, false).into());
                // a single positional argument is the stop value, as in Python's range(stop)
                let stop = stop.unwrap_or_else(|| {
                    let v = start.unwrap();
                    start = None;
                    v
                });
                let start = start.unwrap_or_else(|| int32.const_zero().into());
                // a range value is a stack-allocated [i32; 3] holding start, stop and step
                let ty = int32.array_type(3);
                let ptr = ctx.builder.build_alloca(ty, "range");
                unsafe {
                    let a = ctx.builder.build_in_bounds_gep(ptr, &[zero, zero], "start");
                    let b = ctx.builder.build_in_bounds_gep(ptr, &[zero, int32.const_int(1, false)], "end");
                    let c = ctx.builder.build_in_bounds_gep(ptr, &[zero, int32.const_int(2, false)], "step");
                    ctx.builder.build_store(a, start);
                    ctx.builder.build_store(b, stop);
                    ctx.builder.build_store(c, step);
                }
                Some(ptr.into())
            })))),
        })),
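        // Illustrative sketch only (not part of this file): the generated `range` value is a
        // pointer to an `[i32; 3]` laid out as [start, stop, step], which is what
        // `destructure_range` is expected to read back and what `calculate_len_for_slice_range`
        // consumes below. A host-side mirror of that assumed layout would look like:
        //
        //     #[repr(C)]
        //     struct KernelRange {
        //         start: i32,
        //         stop: i32,
        //         step: i32,
        //     }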
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "str".into(),
            simple_name: "str".into(),
            signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
                args: vec![FuncArg { name: "_".into(), ty: string, default_value: None }],
                ret: string,
                vars: Default::default(),
            }))),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
            // str(s) on a string argument is the identity function
            codegen_callback: Some(Arc::new(GenCall::new(Box::new(|_, _, _, args| {
                Some(args[0].1)
            })))),
        })),
        Arc::new(RwLock::new(TopLevelDef::Function {
            name: "bool".into(),
            simple_name: "bool".into(),
            signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
                args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
                ret: primitives.0.bool,
                vars: var_map,
            }))),
            var_id: Default::default(),
            instance_to_symbol: Default::default(),
            instance_to_stmt: Default::default(),
            resolver: None,
            codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, fun, args| {
                let int32 = ctx.primitives.int32;
                let int64 = ctx.primitives.int64;
                let float = ctx.primitives.float;
                let boolean = ctx.primitives.bool;
                let arg_ty = fun.0.args[0].ty;
                let arg = args[0].1;
                if ctx.unifier.unioned(arg_ty, boolean) {
                    Some(arg)
                } else if ctx.unifier.unioned(arg_ty, int32) {
                    Some(ctx.builder.build_int_compare(
                        IntPredicate::NE,
                        ctx.ctx.i32_type().const_zero(),
                        arg.into_int_value(),
                        "bool",
                    ).into())
                } else if ctx.unifier.unioned(arg_ty, int64) {
                    Some(ctx.builder.build_int_compare(
                        IntPredicate::NE,
                        ctx.ctx.i64_type().const_zero(),
                        arg.into_int_value(),
                        "bool",
                    ).into())
                } else if ctx.unifier.unioned(arg_ty, float) {
                    let val = ctx.builder.build_float_compare(
                        // UNE (unordered-or-not-equal) so that bool(nan) is True and bool(0.0) is False
                        FloatPredicate::UNE,
                        arg.into_float_value(),
                        ctx.ctx.f64_type().const_zero(),
                        "bool",
                    ).into();
                    Some(val)
                } else {
                    unreachable!()
                }
            })))),
        })),
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "floor".into(),
|
||||
simple_name: "floor".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
|
||||
ret: int32,
|
||||
vars: Default::default(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
|
||||
let arg = args[0].1;
|
||||
let floor_intrinsic =
|
||||
ctx.module.get_function("llvm.floor.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
let fn_type = float.fn_type(&[float.into()], false);
|
||||
ctx.module.add_function("llvm.floor.f64", fn_type, None)
|
||||
});
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_call(floor_intrinsic, &[arg], "floor")
|
||||
.try_as_basic_value()
|
||||
.left()
|
||||
.unwrap();
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_float_to_signed_int(
|
||||
val.into_float_value(),
|
||||
ctx.ctx.i32_type(),
|
||||
"fptosi",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
})))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "floor64".into(),
|
||||
simple_name: "floor64".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
|
||||
ret: int64,
|
||||
vars: Default::default(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
|
||||
let arg = args[0].1;
|
||||
let floor_intrinsic =
|
||||
ctx.module.get_function("llvm.floor.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
let fn_type = float.fn_type(&[float.into()], false);
|
||||
ctx.module.add_function("llvm.floor.f64", fn_type, None)
|
||||
});
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_call(floor_intrinsic, &[arg], "floor")
|
||||
.try_as_basic_value()
|
||||
.left()
|
||||
.unwrap();
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_float_to_signed_int(
|
||||
val.into_float_value(),
|
||||
ctx.ctx.i64_type(),
|
||||
"fptosi",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
})))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "ceil".into(),
|
||||
simple_name: "ceil".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
|
||||
ret: int32,
|
||||
vars: Default::default(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
|
||||
let arg = args[0].1;
|
||||
let ceil_intrinsic =
|
||||
ctx.module.get_function("llvm.ceil.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
let fn_type = float.fn_type(&[float.into()], false);
|
||||
ctx.module.add_function("llvm.ceil.f64", fn_type, None)
|
||||
});
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_call(ceil_intrinsic, &[arg], "ceil")
|
||||
.try_as_basic_value()
|
||||
.left()
|
||||
.unwrap();
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_float_to_signed_int(
|
||||
val.into_float_value(),
|
||||
ctx.ctx.i32_type(),
|
||||
"fptosi",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
})))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "ceil64".into(),
|
||||
simple_name: "ceil64".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
|
||||
ret: int64,
|
||||
vars: Default::default(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
|
||||
let arg = args[0].1;
|
||||
let ceil_intrinsic =
|
||||
ctx.module.get_function("llvm.ceil.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
let fn_type = float.fn_type(&[float.into()], false);
|
||||
ctx.module.add_function("llvm.ceil.f64", fn_type, None)
|
||||
});
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_call(ceil_intrinsic, &[arg], "ceil")
|
||||
.try_as_basic_value()
|
||||
.left()
|
||||
.unwrap();
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_float_to_signed_int(
|
||||
val.into_float_value(),
|
||||
ctx.ctx.i64_type(),
|
||||
"fptosi",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
})))),
|
||||
})),
|
||||
        Arc::new(RwLock::new({
            let list_var = primitives.1.get_fresh_var();
            let list = primitives.1.add_ty(TypeEnum::TList { ty: list_var.0 });
            let arg_ty = primitives.1.get_fresh_var_with_range(&[list, primitives.0.range]);
            TopLevelDef::Function {
                name: "len".into(),
                simple_name: "len".into(),
                signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
                    args: vec![FuncArg { name: "_".into(), ty: arg_ty.0, default_value: None }],
                    ret: int32,
                    vars: vec![(list_var.1, list_var.0), (arg_ty.1, arg_ty.0)].into_iter().collect(),
                }))),
                var_id: vec![arg_ty.1],
                instance_to_symbol: Default::default(),
                instance_to_stmt: Default::default(),
                resolver: None,
                codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, fun, args| {
                    let range_ty = ctx.primitives.range;
                    let arg_ty = fun.0.args[0].ty;
                    let arg = args[0].1;
                    if ctx.unifier.unioned(arg_ty, range_ty) {
                        // len(range): compute it from the stored start/stop/step
                        let arg = arg.into_pointer_value();
                        let (start, end, step) = destructure_range(ctx, arg);
                        Some(calculate_len_for_slice_range(ctx, start, end, step).into())
                    } else {
                        // len(list): the length lives in the first field of the list value
                        let int32 = ctx.ctx.i32_type();
                        let zero = int32.const_zero();
                        Some(ctx.build_gep_and_load(arg.into_pointer_value(), &[zero, zero]))
                    }
                })))),
            }
        })),
    ];
    let ast_list: Vec<Option<ast::Stmt<()>>> =
        (0..top_level_def_list.len()).map(|_| None).collect();

    // NOTE: this name list must stay in sync with the function definitions above
    (
        izip!(top_level_def_list, ast_list).collect_vec(),
        &[
            "int32", "int64", "float", "round", "round64", "range", "str", "bool", "floor",
            "floor64", "ceil", "ceil64", "len",
        ],
    )
}

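// Sketch of how `get_builtins` is consumed by the composer (see composer.rs below); the
// variable names mirror that call site:
//
//     let (mut definition_ast_list, builtin_name_list) = builtins::get_builtins(&mut primitives);
//     // builtin function ids are assigned from the end of the definition list backwards,
//     // so the name list and the function definitions must remain aligned:
//     for (id, name) in builtin_name_list.iter().rev().enumerate() {
//         let id = definition_ast_list.len() - id - 1;
//         /* record DefinitionId(id) and the signature under `name` */
//     }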
// equivalent code:
// def length(start, end, step != 0):
//     diff = end - start
//     if diff > 0 and step > 0:
//         return ((diff - 1) // step) + 1
//     elif diff < 0 and step < 0:
//         return ((diff + 1) // step) + 1
//     else:
//         return 0
pub fn calculate_len_for_slice_range<'ctx, 'a>(
    ctx: &mut CodeGenContext<'ctx, 'a>,
    start: IntValue<'ctx>,
    end: IntValue<'ctx>,
    step: IntValue<'ctx>,
) -> IntValue<'ctx> {
    let int32 = ctx.ctx.i32_type();
    let start = ctx.builder.build_int_s_extend(start, int32, "start");
    let end = ctx.builder.build_int_s_extend(end, int32, "end");
    let step = ctx.builder.build_int_s_extend(step, int32, "step");
    let diff = ctx.builder.build_int_sub(end, start, "diff");

    let diff_pos = ctx.builder.build_int_compare(SGT, diff, int32.const_zero(), "diffpos");
    let step_pos = ctx.builder.build_int_compare(SGT, step, int32.const_zero(), "steppos");
    let test_1 = ctx.builder.build_and(diff_pos, step_pos, "bothpos");

    let current = ctx.builder.get_insert_block().unwrap().get_parent().unwrap();
    let then_bb = ctx.ctx.append_basic_block(current, "then");
    let else_bb = ctx.ctx.append_basic_block(current, "else");
    let then_bb_2 = ctx.ctx.append_basic_block(current, "then_2");
    let else_bb_2 = ctx.ctx.append_basic_block(current, "else_2");
    let cont_bb_2 = ctx.ctx.append_basic_block(current, "cont_2");
    let cont_bb = ctx.ctx.append_basic_block(current, "cont");
    ctx.builder.build_conditional_branch(test_1, then_bb, else_bb);

    // then: diff > 0 and step > 0  =>  ((diff - 1) / step) + 1
    ctx.builder.position_at_end(then_bb);
    let length_pos = {
        let diff_pos_min_1 = ctx.builder.build_int_sub(diff, int32.const_int(1, false), "diffminone");
        let length_pos = ctx.builder.build_int_signed_div(diff_pos_min_1, step, "div");
        ctx.builder.build_int_add(length_pos, int32.const_int(1, false), "add1")
    };
    ctx.builder.build_unconditional_branch(cont_bb);

    // else: check diff < 0 and step < 0, otherwise the length is zero
    ctx.builder.position_at_end(else_bb);
    let phi_1 = {
        let diff_neg = ctx.builder.build_int_compare(SLT, diff, int32.const_zero(), "diffneg");
        let step_neg = ctx.builder.build_int_compare(SLT, step, int32.const_zero(), "stepneg");
        let test_2 = ctx.builder.build_and(diff_neg, step_neg, "bothneg");

        ctx.builder.build_conditional_branch(test_2, then_bb_2, else_bb_2);

        ctx.builder.position_at_end(then_bb_2);
        let length_neg = {
            let diff_neg_add_1 = ctx.builder.build_int_add(diff, int32.const_int(1, false), "diffminone");
            let length_neg = ctx.builder.build_int_signed_div(diff_neg_add_1, step, "div");
            ctx.builder.build_int_add(length_neg, int32.const_int(1, false), "add1")
        };
        ctx.builder.build_unconditional_branch(cont_bb_2);

        ctx.builder.position_at_end(else_bb_2);
        let length_zero = int32.const_zero();
        ctx.builder.build_unconditional_branch(cont_bb_2);

        ctx.builder.position_at_end(cont_bb_2);
        let phi_1 = ctx.builder.build_phi(int32, "lenphi1");
        phi_1.add_incoming(&[(&length_neg, then_bb_2), (&length_zero, else_bb_2)]);
        phi_1.as_basic_value().into_int_value()
    };
    ctx.builder.build_unconditional_branch(cont_bb);

    ctx.builder.position_at_end(cont_bb);
    let phi = ctx.builder.build_phi(int32, "lenphi");
    phi.add_incoming(&[(&length_pos, then_bb), (&phi_1, cont_bb_2)]);
    phi.as_basic_value().into_int_value()
}
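// Host-side sketch of the same length computation, for reference only (not part of this file);
// it mirrors the `equivalent code` comment above and can be sanity-checked directly:
//
//     fn range_len(start: i32, end: i32, step: i32) -> i32 {
//         let diff = end - start;
//         if diff > 0 && step > 0 {
//             (diff - 1) / step + 1
//         } else if diff < 0 && step < 0 {
//             (diff + 1) / step + 1
//         } else {
//             0
//         }
//     }
//
//     // e.g. range_len(0, 10, 3) == 4 and range_len(10, 0, -2) == 5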
@@ -1,16 +1,28 @@
use std::cell::RefCell;

use nac3parser::ast::fold::Fold;
use inkwell::{FloatPredicate, IntPredicate};

use crate::{
    symbol_resolver::SymbolValue,
    typecheck::type_inferencer::{FunctionData, Inferencer},
    codegen::expr::get_subst_key,
};

use super::*;

pub struct ComposerConfig {
    pub kernel_ann: Option<&'static str>,
    pub kernel_invariant_ann: &'static str,
}

impl Default for ComposerConfig {
    fn default() -> Self {
        ComposerConfig {
            kernel_ann: None,
            kernel_invariant_ann: "Invariant",
        }
    }
}
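// Illustrative only: an embedder would presumably override these defaults so that annotated
// fields are recognised. The removed hard-coded version further down matched the names
// "Kernel" and "KernelInvariant", so an equivalent configuration would be (assumption, not
// part of this commit):
//
//     let core_config = ComposerConfig {
//         kernel_ann: Some("Kernel"),
//         kernel_invariant_ann: "KernelInvariant",
//     };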

type DefAst = (Arc<RwLock<TopLevelDef>>, Option<ast::Stmt<()>>);
pub struct TopLevelComposer {
    // list of top level definitions, same as top level context

@@ -26,12 +38,13 @@ pub struct TopLevelComposer {
    // get the class def id of a class method
    pub method_class: HashMap<DefinitionId, DefinitionId>,
    // number of built-in functions and classes in the definition list; later passes skip them
    pub built_in_num: usize,
    pub builtin_num: usize,
    pub core_config: ComposerConfig,
}

impl Default for TopLevelComposer {
    fn default() -> Self {
        Self::new(vec![]).0
        Self::new(vec![], Default::default()).0
    }
}

@@ -40,408 +53,10 @@ impl TopLevelComposer {
    /// resolver can later figure out primitive type definitions when passed a primitive type name
    pub fn new(
        builtins: Vec<(StrRef, FunSignature, Arc<GenCall>)>,
        core_config: ComposerConfig,
    ) -> (Self, HashMap<StrRef, DefinitionId>, HashMap<StrRef, Type>) {
        let mut primitives = Self::make_primitives();

        let int32 = primitives.0.int32;
        let int64 = primitives.0.int64;
        let float = primitives.0.float;
        let boolean = primitives.0.bool;
        let range = primitives.0.range;
        let string = primitives.0.str;
        let num_ty = primitives.1.get_fresh_var_with_range(&[int32, int64, float, boolean]);
        let var_map: HashMap<_, _> = vec![(num_ty.1, num_ty.0)].into_iter().collect();

let mut definition_ast_list = {
|
||||
let top_level_def_list = vec![
|
||||
Arc::new(RwLock::new(Self::make_top_level_class_def(
|
||||
0,
|
||||
None,
|
||||
"int32".into(),
|
||||
None,
|
||||
))),
|
||||
Arc::new(RwLock::new(Self::make_top_level_class_def(
|
||||
1,
|
||||
None,
|
||||
"int64".into(),
|
||||
None,
|
||||
))),
|
||||
Arc::new(RwLock::new(Self::make_top_level_class_def(
|
||||
2,
|
||||
None,
|
||||
"float".into(),
|
||||
None,
|
||||
))),
|
||||
Arc::new(RwLock::new(Self::make_top_level_class_def(3, None, "bool".into(), None))),
|
||||
Arc::new(RwLock::new(Self::make_top_level_class_def(4, None, "none".into(), None))),
|
||||
Arc::new(RwLock::new(Self::make_top_level_class_def(
|
||||
5,
|
||||
None,
|
||||
"range".into(),
|
||||
None,
|
||||
))),
|
||||
Arc::new(RwLock::new(Self::make_top_level_class_def(6, None, "str".into(), None))),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "int32".into(),
|
||||
simple_name: "int32".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
|
||||
ret: int32,
|
||||
vars: var_map.clone(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, fun, args| {
|
||||
let int32 = ctx.primitives.int32;
|
||||
let int64 = ctx.primitives.int64;
|
||||
let float = ctx.primitives.float;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1;
|
||||
if ctx.unifier.unioned(arg_ty, boolean) {
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_int_s_extend(
|
||||
arg.into_int_value(),
|
||||
ctx.ctx.i32_type(),
|
||||
"sext",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
} else if ctx.unifier.unioned(arg_ty, int32) {
|
||||
Some(arg)
|
||||
} else if ctx.unifier.unioned(arg_ty, int64) {
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_int_truncate(
|
||||
arg.into_int_value(),
|
||||
ctx.ctx.i32_type(),
|
||||
"trunc",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
} else if ctx.unifier.unioned(arg_ty, float) {
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_float_to_signed_int(
|
||||
arg.into_float_value(),
|
||||
ctx.ctx.i32_type(),
|
||||
"fptosi",
|
||||
)
|
||||
.into();
|
||||
Some(val)
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
)))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "int64".into(),
|
||||
simple_name: "int64".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
|
||||
ret: int64,
|
||||
vars: var_map.clone(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, fun, args| {
|
||||
let int32 = ctx.primitives.int32;
|
||||
let int64 = ctx.primitives.int64;
|
||||
let float = ctx.primitives.float;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1;
|
||||
if ctx.unifier.unioned(arg_ty, boolean)
|
||||
|| ctx.unifier.unioned(arg_ty, int32)
|
||||
{
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_int_s_extend(
|
||||
arg.into_int_value(),
|
||||
ctx.ctx.i64_type(),
|
||||
"sext",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
} else if ctx.unifier.unioned(arg_ty, int64) {
|
||||
Some(arg)
|
||||
} else if ctx.unifier.unioned(arg_ty, float) {
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_float_to_signed_int(
|
||||
arg.into_float_value(),
|
||||
ctx.ctx.i64_type(),
|
||||
"fptosi",
|
||||
)
|
||||
.into();
|
||||
Some(val)
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
)))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "float".into(),
|
||||
simple_name: "float".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
|
||||
ret: float,
|
||||
vars: var_map.clone(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, fun, args| {
|
||||
let int32 = ctx.primitives.int32;
|
||||
let int64 = ctx.primitives.int64;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let float = ctx.primitives.float;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1;
|
||||
if ctx.unifier.unioned(arg_ty, boolean)
|
||||
|| ctx.unifier.unioned(arg_ty, int32)
|
||||
|| ctx.unifier.unioned(arg_ty, int64)
|
||||
{
|
||||
let arg = args[0].1.into_int_value();
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_signed_int_to_float(arg, ctx.ctx.f64_type(), "sitofp")
|
||||
.into();
|
||||
Some(val)
|
||||
} else if ctx.unifier.unioned(arg_ty, float) {
|
||||
Some(arg)
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
)))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "round".into(),
|
||||
simple_name: "round".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
|
||||
ret: int32,
|
||||
vars: Default::default(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
|
||||
let arg = args[0].1;
|
||||
let round_intrinsic =
|
||||
ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
let fn_type = float.fn_type(&[float.into()], false);
|
||||
ctx.module.add_function("llvm.round.f64", fn_type, None)
|
||||
});
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_call(round_intrinsic, &[arg], "round")
|
||||
.try_as_basic_value()
|
||||
.left()
|
||||
.unwrap();
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_float_to_signed_int(
|
||||
val.into_float_value(),
|
||||
ctx.ctx.i32_type(),
|
||||
"fptosi",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
})))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "round64".into(),
|
||||
simple_name: "round64".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: float, default_value: None }],
|
||||
ret: int64,
|
||||
vars: Default::default(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
|
||||
let arg = args[0].1;
|
||||
let round_intrinsic =
|
||||
ctx.module.get_function("llvm.round.f64").unwrap_or_else(|| {
|
||||
let float = ctx.ctx.f64_type();
|
||||
let fn_type = float.fn_type(&[float.into()], false);
|
||||
ctx.module.add_function("llvm.round.f64", fn_type, None)
|
||||
});
|
||||
let val = ctx
|
||||
.builder
|
||||
.build_call(round_intrinsic, &[arg], "round")
|
||||
.try_as_basic_value()
|
||||
.left()
|
||||
.unwrap();
|
||||
Some(
|
||||
ctx.builder
|
||||
.build_float_to_signed_int(
|
||||
val.into_float_value(),
|
||||
ctx.ctx.i64_type(),
|
||||
"fptosi",
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
})))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "range".into(),
|
||||
simple_name: "range".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![
|
||||
FuncArg { name: "start".into(), ty: int32, default_value: None },
|
||||
FuncArg {
|
||||
name: "stop".into(),
|
||||
ty: int32,
|
||||
// placeholder
|
||||
default_value: Some(SymbolValue::I32(0)),
|
||||
},
|
||||
FuncArg {
|
||||
name: "step".into(),
|
||||
ty: int32,
|
||||
default_value: Some(SymbolValue::I32(1)),
|
||||
},
|
||||
],
|
||||
ret: range,
|
||||
vars: Default::default(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(|ctx, _, _, args| {
|
||||
let mut start = None;
|
||||
let mut stop = None;
|
||||
let mut step = None;
|
||||
let int32 = ctx.ctx.i32_type();
|
||||
let zero = int32.const_zero();
|
||||
for (i, arg) in args.iter().enumerate() {
|
||||
if arg.0 == Some("start".into()) {
|
||||
start = Some(arg.1);
|
||||
} else if arg.0 == Some("stop".into()) {
|
||||
stop = Some(arg.1);
|
||||
} else if arg.0 == Some("step".into()) {
|
||||
step = Some(arg.1);
|
||||
} else if i == 0 {
|
||||
start = Some(arg.1);
|
||||
} else if i == 1 {
|
||||
stop = Some(arg.1);
|
||||
} else if i == 2 {
|
||||
step = Some(arg.1);
|
||||
}
|
||||
}
|
||||
// TODO: error when step == 0
|
||||
let step = step.unwrap_or_else(|| int32.const_int(1, false).into());
|
||||
let stop = stop.unwrap_or_else(|| {
|
||||
let v = start.unwrap();
|
||||
start = None;
|
||||
v
|
||||
});
|
||||
let start = start.unwrap_or_else(|| int32.const_zero().into());
|
||||
let ty = int32.array_type(3);
|
||||
let ptr = ctx.builder.build_alloca(ty, "range");
|
||||
unsafe {
|
||||
let a = ctx.builder.build_in_bounds_gep(ptr, &[zero, zero], "start");
|
||||
let b = ctx.builder.build_in_bounds_gep(
|
||||
ptr,
|
||||
&[zero, int32.const_int(1, false)],
|
||||
"end",
|
||||
);
|
||||
let c = ctx.builder.build_in_bounds_gep(
|
||||
ptr,
|
||||
&[zero, int32.const_int(2, false)],
|
||||
"step",
|
||||
);
|
||||
ctx.builder.build_store(a, start);
|
||||
ctx.builder.build_store(b, stop);
|
||||
ctx.builder.build_store(c, step);
|
||||
}
|
||||
Some(ptr.into())
|
||||
})))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "str".into(),
|
||||
simple_name: "str".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: string, default_value: None }],
|
||||
ret: string,
|
||||
vars: Default::default(),
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(|_, _, _, args| {
|
||||
Some(args[0].1)
|
||||
})))),
|
||||
})),
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: "bool".into(),
|
||||
simple_name: "bool".into(),
|
||||
signature: primitives.1.add_ty(TypeEnum::TFunc(RefCell::new(FunSignature {
|
||||
args: vec![FuncArg { name: "_".into(), ty: num_ty.0, default_value: None }],
|
||||
ret: primitives.0.bool,
|
||||
vars: var_map,
|
||||
}))),
|
||||
var_id: Default::default(),
|
||||
instance_to_symbol: Default::default(),
|
||||
instance_to_stmt: Default::default(),
|
||||
resolver: None,
|
||||
codegen_callback: Some(Arc::new(GenCall::new(Box::new(
|
||||
|ctx, _, fun, args| {
|
||||
let int32 = ctx.primitives.int32;
|
||||
let int64 = ctx.primitives.int64;
|
||||
let float = ctx.primitives.float;
|
||||
let boolean = ctx.primitives.bool;
|
||||
let arg_ty = fun.0.args[0].ty;
|
||||
let arg = args[0].1;
|
||||
if ctx.unifier.unioned(arg_ty, boolean) {
|
||||
Some(arg)
|
||||
} else if ctx.unifier.unioned(arg_ty, int32) || ctx.unifier.unioned(arg_ty, int64) {
|
||||
Some(ctx.builder.build_int_compare(
|
||||
IntPredicate::NE,
|
||||
ctx.ctx.i64_type().const_zero(),
|
||||
arg.into_int_value(),
|
||||
"bool",
|
||||
).into())
|
||||
} else if ctx.unifier.unioned(arg_ty, float) {
|
||||
let val = ctx.builder.
|
||||
build_float_compare(
|
||||
// UEQ as bool(nan) is True
|
||||
FloatPredicate::UEQ,
|
||||
arg.into_float_value(),
|
||||
ctx.ctx.f64_type().const_zero(),
|
||||
"bool"
|
||||
).into();
|
||||
Some(val)
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
)))),
|
||||
})),
|
||||
];
|
||||
let ast_list: Vec<Option<ast::Stmt<()>>> =
|
||||
(0..top_level_def_list.len()).map(|_| None).collect();
|
||||
izip!(top_level_def_list, ast_list).collect_vec()
|
||||
};
|
||||
let (mut definition_ast_list, builtin_name_list) = builtins::get_builtins(&mut primitives);
|
||||
let primitives_ty = primitives.0;
|
||||
let mut unifier = primitives.1;
|
||||
let mut keyword_list: HashSet<StrRef> = HashSet::from_iter(vec![
|
||||
|
@ -464,19 +79,17 @@ impl TopLevelComposer {
|
|||
let defined_names: HashSet<String> = Default::default();
|
||||
let method_class: HashMap<DefinitionId, DefinitionId> = Default::default();
|
||||
|
||||
let mut built_in_id: HashMap<StrRef, DefinitionId> = Default::default();
|
||||
let mut built_in_ty: HashMap<StrRef, Type> = Default::default();
|
||||
let mut builtin_id: HashMap<StrRef, DefinitionId> = Default::default();
|
||||
let mut builtin_ty: HashMap<StrRef, Type> = Default::default();
|
||||
|
||||
for (id, name) in
|
||||
["int32", "int64", "float", "round", "round64", "range", "str", "bool"].iter().rev().enumerate()
|
||||
{
|
||||
for (id, name) in builtin_name_list.iter().rev().enumerate() {
|
||||
let name = (**name).into();
|
||||
let id = definition_ast_list.len() - id - 1;
|
||||
let def = definition_ast_list[id].0.read();
|
||||
if let TopLevelDef::Function { simple_name, signature, .. } = &*def {
|
||||
assert!(name == *simple_name);
|
||||
built_in_ty.insert(name, *signature);
|
||||
built_in_id.insert(name, DefinitionId(id));
|
||||
builtin_ty.insert(name, *signature);
|
||||
builtin_id.insert(name, DefinitionId(id));
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
|
@ -484,8 +97,8 @@ impl TopLevelComposer {
|
|||
|
||||
for (name, sig, codegen_callback) in builtins {
|
||||
let fun_sig = unifier.add_ty(TypeEnum::TFunc(RefCell::new(sig)));
|
||||
built_in_ty.insert(name, fun_sig);
|
||||
built_in_id.insert(name, DefinitionId(definition_ast_list.len()));
|
||||
builtin_ty.insert(name, fun_sig);
|
||||
builtin_id.insert(name, DefinitionId(definition_ast_list.len()));
|
||||
definition_ast_list.push((
|
||||
Arc::new(RwLock::new(TopLevelDef::Function {
|
||||
name: name.into(),
|
||||
|
@ -504,16 +117,17 @@ impl TopLevelComposer {
|
|||
|
||||
(
|
||||
TopLevelComposer {
|
||||
built_in_num: definition_ast_list.len(),
|
||||
builtin_num: definition_ast_list.len(),
|
||||
definition_ast_list,
|
||||
primitives_ty,
|
||||
unifier,
|
||||
keyword_list,
|
||||
defined_names,
|
||||
method_class,
|
||||
core_config,
|
||||
},
|
||||
built_in_id,
|
||||
built_in_ty,
|
||||
builtin_id,
|
||||
builtin_ty,
|
||||
)
|
||||
}
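        // Illustrative usage of the constructor above (the bindings on the left are the
        // caller's own names, not part of this commit):
        //
        //     let (mut composer, builtin_id, builtin_ty) =
        //         TopLevelComposer::new(vec![], ComposerConfig::default());
        //     // later passes use `composer.builtin_num` to skip the built-in definitions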
|
||||
|
||||
|
@ -532,7 +146,7 @@ impl TopLevelComposer {
|
|||
}
|
||||
}
|
||||
|
||||
fn extract_def_list(&self) -> Vec<Arc<RwLock<TopLevelDef>>> {
|
||||
pub fn extract_def_list(&self) -> Vec<Arc<RwLock<TopLevelDef>>> {
|
||||
self.definition_ast_list.iter().map(|(def, ..)| def.clone()).collect_vec()
|
||||
}
|
||||
|
||||
|
@ -752,7 +366,7 @@ impl TopLevelComposer {
|
|||
let primitives_store = &self.primitives_ty;
|
||||
|
||||
// skip 5 to skip analyzing the primitives
|
||||
for (class_def, class_ast) in def_list.iter().skip(self.built_in_num) {
|
||||
for (class_def, class_ast) in def_list.iter().skip(self.builtin_num) {
|
||||
// only deal with class def here
|
||||
let mut class_def = class_def.write();
|
||||
let (class_bases_ast, class_def_type_vars, class_resolver) = {
|
||||
|
@ -864,7 +478,7 @@ impl TopLevelComposer {
|
|||
|
||||
// first, only push direct parent into the list
|
||||
// skip 5 to skip analyzing the primitives
|
||||
for (class_def, class_ast) in self.definition_ast_list.iter_mut().skip(self.built_in_num) {
|
||||
for (class_def, class_ast) in self.definition_ast_list.iter_mut().skip(self.builtin_num) {
|
||||
let mut class_def = class_def.write();
|
||||
let (class_def_id, class_bases, class_ancestors, class_resolver, class_type_vars) = {
|
||||
if let TopLevelDef::Class { ancestors, resolver, object_id, type_vars, .. } =
|
||||
|
@ -933,7 +547,7 @@ impl TopLevelComposer {
|
|||
// second, get all ancestors
|
||||
let mut ancestors_store: HashMap<DefinitionId, Vec<TypeAnnotation>> = Default::default();
|
||||
// skip 5 to skip analyzing the primitives
|
||||
for (class_def, _) in self.definition_ast_list.iter().skip(self.built_in_num) {
|
||||
for (class_def, _) in self.definition_ast_list.iter().skip(self.builtin_num) {
|
||||
let class_def = class_def.read();
|
||||
let (class_ancestors, class_id) = {
|
||||
if let TopLevelDef::Class { ancestors, object_id, .. } = class_def.deref() {
|
||||
|
@ -955,7 +569,7 @@ impl TopLevelComposer {
|
|||
|
||||
// insert the ancestors to the def list
|
||||
// skip 5 to skip analyzing the primitives
|
||||
for (class_def, _) in self.definition_ast_list.iter_mut().skip(self.built_in_num) {
|
||||
for (class_def, _) in self.definition_ast_list.iter_mut().skip(self.builtin_num) {
|
||||
let mut class_def = class_def.write();
|
||||
let (class_ancestors, class_id, class_type_vars) = {
|
||||
if let TopLevelDef::Class { ancestors, object_id, type_vars, .. } =
|
||||
|
@ -988,7 +602,7 @@ impl TopLevelComposer {
|
|||
let mut type_var_to_concrete_def: HashMap<Type, TypeAnnotation> = HashMap::new();
|
||||
|
||||
// skip 5 to skip analyzing the primitives
|
||||
for (class_def, class_ast) in def_ast_list.iter().skip(self.built_in_num) {
|
||||
for (class_def, class_ast) in def_ast_list.iter().skip(self.builtin_num) {
|
||||
if matches!(&*class_def.read(), TopLevelDef::Class { .. }) {
|
||||
Self::analyze_single_class_methods_fields(
|
||||
class_def.clone(),
|
||||
|
@ -997,7 +611,7 @@ impl TopLevelComposer {
|
|||
unifier,
|
||||
primitives,
|
||||
&mut type_var_to_concrete_def,
|
||||
&self.keyword_list,
|
||||
(&self.keyword_list, &self.core_config)
|
||||
)?
|
||||
}
|
||||
}
|
||||
|
@ -1009,7 +623,7 @@ impl TopLevelComposer {
|
|||
loop {
|
||||
let mut finished = true;
|
||||
|
||||
for (class_def, _) in def_ast_list.iter().skip(self.built_in_num) {
|
||||
for (class_def, _) in def_ast_list.iter().skip(self.builtin_num) {
|
||||
let mut class_def = class_def.write();
|
||||
if let TopLevelDef::Class { ancestors, .. } = class_def.deref() {
|
||||
// if the length of the ancestor is equal to the current depth
|
||||
|
@ -1068,7 +682,7 @@ impl TopLevelComposer {
|
|||
let primitives_store = &self.primitives_ty;
|
||||
|
||||
// skip 5 to skip analyzing the primitives
|
||||
for (function_def, function_ast) in def_list.iter().skip(self.built_in_num) {
|
||||
for (function_def, function_ast) in def_list.iter().skip(self.builtin_num) {
|
||||
let mut function_def = function_def.write();
|
||||
let function_def = function_def.deref_mut();
|
||||
let function_ast = if let Some(x) = function_ast.as_ref() {
|
||||
|
@ -1270,8 +884,9 @@ impl TopLevelComposer {
|
|||
unifier: &mut Unifier,
|
||||
primitives: &PrimitiveStore,
|
||||
type_var_to_concrete_def: &mut HashMap<Type, TypeAnnotation>,
|
||||
keyword_list: &HashSet<StrRef>,
|
||||
core_info: (&HashSet<StrRef>, &ComposerConfig),
|
||||
) -> Result<(), String> {
|
||||
let (keyword_list, core_config) = core_info;
|
||||
let mut class_def = class_def.write();
|
||||
let (
|
||||
class_id,
|
||||
|
@ -1503,20 +1118,17 @@ impl TopLevelComposer {
|
|||
            let dummy_field_type = unifier.get_fresh_var().0;

            // handle Kernel[T], KernelInvariant[T]
            let (annotation, mutable) = {
                let mut result = None;
                if let ast::ExprKind::Subscript { value, slice, .. } = &annotation.as_ref().node {
                    if let ast::ExprKind::Name { id, .. } = &value.node {
                        result = if id == &"Kernel".into() {
                            Some((slice, true))
                        } else if id == &"KernelInvariant".into() {
                            Some((slice, false))
                        } else {
                            None
                        }
                    }
                }
                result.unwrap_or((annotation, true))
            let (annotation, mutable) = match &annotation.node {
                ast::ExprKind::Subscript { value, slice, .. } if matches!(
                    &value.node,
                    ast::ExprKind::Name { id, .. } if id == &core_config.kernel_invariant_ann.into()
                ) => (slice, false),
                ast::ExprKind::Subscript { value, slice, .. } if matches!(
                    &value.node,
                    ast::ExprKind::Name { id, .. } if core_config.kernel_ann.map_or(false, |c| id == &c.into())
                ) => (slice, true),
                _ if core_config.kernel_ann.is_none() => (annotation, true),
                _ => continue, // ignore fields annotated otherwise
            };
|
||||
class_fields_def.push((*attr, dummy_field_type, mutable));
|
||||
|
||||
|
@ -1695,7 +1307,7 @@ impl TopLevelComposer {
|
|||
fn analyze_function_instance(&mut self) -> Result<(), String> {
|
||||
// first get the class constructor type correct for the following type check in the function body
|
||||
// also do class field instantiation check
|
||||
for (def, ast) in self.definition_ast_list.iter().skip(self.built_in_num) {
|
||||
for (def, ast) in self.definition_ast_list.iter().skip(self.builtin_num) {
|
||||
let class_def = def.read();
|
||||
if let TopLevelDef::Class {
|
||||
constructor,
|
||||
|
@ -1767,7 +1379,7 @@ impl TopLevelComposer {
|
|||
|
||||
let ctx = Arc::new(self.make_top_level_context());
|
||||
// type inference inside function body
|
||||
for (id, (def, ast)) in self.definition_ast_list.iter().enumerate().skip(self.built_in_num)
|
||||
for (id, (def, ast)) in self.definition_ast_list.iter().enumerate().skip(self.builtin_num)
|
||||
{
|
||||
let mut function_def = def.write();
|
||||
if let TopLevelDef::Function {
|
||||
|
|
|
@@ -24,7 +24,8 @@ pub struct DefinitionId(pub usize);

pub mod composer;
pub mod helper;
mod type_annotation;
pub mod builtins;
pub mod type_annotation;
use composer::*;
use type_annotation::*;
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -4,10 +4,10 @@ expression: res_vec
|
|||
|
||||
---
|
||||
[
|
||||
"Class {\nname: \"Generic_A\",\nancestors: [\"{class: Generic_A, params: [\\\"var4\\\"]}\", \"{class: B, params: []}\"],\nfields: [\"aa\", \"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"foo\", \"fn[[b=var3], none]\"), (\"fun\", \"fn[[a=int32], var4]\")],\ntype_vars: [\"var4\"]\n}\n",
|
||||
"Function {\nname: \"Generic_A.__init__\",\nsig: \"fn[[], none]\",\nvar_id: [4]\n}\n",
|
||||
"Function {\nname: \"Generic_A.fun\",\nsig: \"fn[[a=int32], var4]\",\nvar_id: [4]\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\"],\nfields: [\"aa\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"foo\", \"fn[[b=var3], none]\")],\ntype_vars: []\n}\n",
|
||||
"Class {\nname: \"Generic_A\",\nancestors: [\"{class: Generic_A, params: [\\\"var6\\\"]}\", \"{class: B, params: []}\"],\nfields: [\"aa\", \"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"foo\", \"fn[[b=var5], none]\"), (\"fun\", \"fn[[a=int32], var6]\")],\ntype_vars: [\"var6\"]\n}\n",
|
||||
"Function {\nname: \"Generic_A.__init__\",\nsig: \"fn[[], none]\",\nvar_id: [6]\n}\n",
|
||||
"Function {\nname: \"Generic_A.fun\",\nsig: \"fn[[a=int32], var6]\",\nvar_id: [6]\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\"],\nfields: [\"aa\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"foo\", \"fn[[b=var5], none]\")],\ntype_vars: []\n}\n",
|
||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"B.foo\",\nsig: \"fn[[b=var3], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"B.foo\",\nsig: \"fn[[b=var5], none]\",\nvar_id: []\n}\n",
|
||||
]
|
||||
|
|
|
@ -4,13 +4,13 @@ expression: res_vec
|
|||
|
||||
---
|
||||
[
|
||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var3\\\"]}\"],\nfields: [\"a\", \"b\", \"c\"],\nmethods: [(\"__init__\", \"fn[[t=var3], none]\"), (\"fun\", \"fn[[a=int32, b=var3], list[virtual[B[4->bool]]]]\"), (\"foo\", \"fn[[c=C], none]\")],\ntype_vars: [\"var3\"]\n}\n",
|
||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[t=var3], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a=int32, b=var3], list[virtual[B[4->bool]]]]\",\nvar_id: []\n}\n",
|
||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var5\\\"]}\"],\nfields: [\"a\", \"b\", \"c\"],\nmethods: [(\"__init__\", \"fn[[t=var5], none]\"), (\"fun\", \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\"), (\"foo\", \"fn[[c=C], none]\")],\ntype_vars: [\"var5\"]\n}\n",
|
||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[t=var5], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"A.foo\",\nsig: \"fn[[c=C], none]\",\nvar_id: []\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: [\\\"var4\\\"]}\", \"{class: A, params: [\\\"float\\\"]}\"],\nfields: [\"a\", \"b\", \"c\", \"d\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a=int32, b=var3], list[virtual[B[4->bool]]]]\"), (\"foo\", \"fn[[c=C], none]\")],\ntype_vars: [\"var4\"]\n}\n",
|
||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: [4]\n}\n",
|
||||
"Function {\nname: \"B.fun\",\nsig: \"fn[[a=int32, b=var3], list[virtual[B[4->bool]]]]\",\nvar_id: [4]\n}\n",
|
||||
"Class {\nname: \"C\",\nancestors: [\"{class: C, params: []}\", \"{class: B, params: [\\\"bool\\\"]}\", \"{class: A, params: [\\\"float\\\"]}\"],\nfields: [\"a\", \"b\", \"c\", \"d\", \"e\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a=int32, b=var3], list[virtual[B[4->bool]]]]\"), (\"foo\", \"fn[[c=C], none]\")],\ntype_vars: []\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: [\\\"var6\\\"]}\", \"{class: A, params: [\\\"float\\\"]}\"],\nfields: [\"a\", \"b\", \"c\", \"d\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\"), (\"foo\", \"fn[[c=C], none]\")],\ntype_vars: [\"var6\"]\n}\n",
|
||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: [6]\n}\n",
|
||||
"Function {\nname: \"B.fun\",\nsig: \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\",\nvar_id: [6]\n}\n",
|
||||
"Class {\nname: \"C\",\nancestors: [\"{class: C, params: []}\", \"{class: B, params: [\\\"bool\\\"]}\", \"{class: A, params: [\\\"float\\\"]}\"],\nfields: [\"a\", \"b\", \"c\", \"d\", \"e\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a=int32, b=var5], list[virtual[B[6->bool]]]]\"), (\"foo\", \"fn[[c=C], none]\")],\ntype_vars: []\n}\n",
|
||||
"Function {\nname: \"C.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||
]
|
||||
|
|
|
@ -4,11 +4,11 @@ expression: res_vec
|
|||
|
||||
---
|
||||
[
|
||||
"Function {\nname: \"foo\",\nsig: \"fn[[a=list[int32], b=tuple[var3, float]], A[3->B, 4->bool]]\",\nvar_id: []\n}\n",
|
||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var3\\\", \\\"var4\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[v=var4], none]\"), (\"fun\", \"fn[[a=var3], var4]\")],\ntype_vars: [\"var3\", \"var4\"]\n}\n",
|
||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[v=var4], none]\",\nvar_id: [4]\n}\n",
|
||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a=var3], var4]\",\nvar_id: [4]\n}\n",
|
||||
"Function {\nname: \"gfun\",\nsig: \"fn[[a=A[3->list[float], 4->int32]], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"foo\",\nsig: \"fn[[a=list[int32], b=tuple[var5, float]], A[5->B, 6->bool]]\",\nvar_id: []\n}\n",
|
||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var5\\\", \\\"var6\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[v=var6], none]\"), (\"fun\", \"fn[[a=var5], var6]\")],\ntype_vars: [\"var5\", \"var6\"]\n}\n",
|
||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[v=var6], none]\",\nvar_id: [6]\n}\n",
|
||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a=var5], var6]\",\nvar_id: [6]\n}\n",
|
||||
"Function {\nname: \"gfun\",\nsig: \"fn[[a=A[5->list[float], 6->int32]], none]\",\nvar_id: []\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], none]\")],\ntype_vars: []\n}\n",
|
||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||
]
|
||||
|
|
|
@ -4,11 +4,11 @@ expression: res_vec
|
|||
|
||||
---
|
||||
[
|
||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var3\\\", \\\"var4\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[a=A[3->float, 4->bool], b=B], none]\"), (\"fun\", \"fn[[a=A[3->float, 4->bool]], A[3->bool, 4->int32]]\")],\ntype_vars: [\"var3\", \"var4\"]\n}\n",
|
||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[a=A[3->float, 4->bool], b=B], none]\",\nvar_id: [4]\n}\n",
|
||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a=A[3->float, 4->bool]], A[3->bool, 4->int32]]\",\nvar_id: [4]\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\", \"{class: A, params: [\\\"int64\\\", \\\"bool\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a=A[3->float, 4->bool]], A[3->bool, 4->int32]]\"), (\"foo\", \"fn[[b=B], B]\"), (\"bar\", \"fn[[a=A[3->list[B], 4->int32]], tuple[A[3->virtual[A[3->B, 4->int32]], 4->bool], B]]\")],\ntype_vars: []\n}\n",
|
||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: [\\\"var5\\\", \\\"var6\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[a=A[5->float, 6->bool], b=B], none]\"), (\"fun\", \"fn[[a=A[5->float, 6->bool]], A[5->bool, 6->int32]]\")],\ntype_vars: [\"var5\", \"var6\"]\n}\n",
|
||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[a=A[5->float, 6->bool], b=B], none]\",\nvar_id: [6]\n}\n",
|
||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[a=A[5->float, 6->bool]], A[5->bool, 6->int32]]\",\nvar_id: [6]\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\", \"{class: A, params: [\\\"int64\\\", \\\"bool\\\"]}\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[a=A[5->float, 6->bool]], A[5->bool, 6->int32]]\"), (\"foo\", \"fn[[b=B], B]\"), (\"bar\", \"fn[[a=A[5->list[B], 6->int32]], tuple[A[5->virtual[A[5->B, 6->int32]], 6->bool], B]]\")],\ntype_vars: []\n}\n",
|
||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"B.foo\",\nsig: \"fn[[b=B], B]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"B.bar\",\nsig: \"fn[[a=A[3->list[B], 4->int32]], tuple[A[3->virtual[A[3->B, 4->int32]], 4->bool], B]]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"B.bar\",\nsig: \"fn[[a=A[5->list[B], 6->int32]], tuple[A[5->virtual[A[5->B, 6->int32]], 6->bool], B]]\",\nvar_id: []\n}\n",
|
||||
]
|
||||
|
|
|
@ -4,15 +4,15 @@ expression: res_vec
|
|||
|
||||
---
|
||||
[
|
||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b=B], none]\"), (\"foo\", \"fn[[a=var3, b=var4], none]\")],\ntype_vars: []\n}\n",
|
||||
"Class {\nname: \"A\",\nancestors: [\"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b=B], none]\"), (\"foo\", \"fn[[a=var5, b=var6], none]\")],\ntype_vars: []\n}\n",
|
||||
"Function {\nname: \"A.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"A.fun\",\nsig: \"fn[[b=B], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"A.foo\",\nsig: \"fn[[a=var3, b=var4], none]\",\nvar_id: [4]\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\", \"{class: C, params: []}\", \"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b=B], none]\"), (\"foo\", \"fn[[a=var3, b=var4], none]\")],\ntype_vars: []\n}\n",
|
||||
"Function {\nname: \"A.foo\",\nsig: \"fn[[a=var5, b=var6], none]\",\nvar_id: [6]\n}\n",
|
||||
"Class {\nname: \"B\",\nancestors: [\"{class: B, params: []}\", \"{class: C, params: []}\", \"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b=B], none]\"), (\"foo\", \"fn[[a=var5, b=var6], none]\")],\ntype_vars: []\n}\n",
|
||||
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||
"Class {\nname: \"C\",\nancestors: [\"{class: C, params: []}\", \"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b=B], none]\"), (\"foo\", \"fn[[a=var3, b=var4], none]\")],\ntype_vars: []\n}\n",
|
||||
"Class {\nname: \"C\",\nancestors: [\"{class: C, params: []}\", \"{class: A, params: []}\"],\nfields: [\"a\"],\nmethods: [(\"__init__\", \"fn[[], none]\"), (\"fun\", \"fn[[b=B], none]\"), (\"foo\", \"fn[[a=var5, b=var6], none]\")],\ntype_vars: []\n}\n",
|
||||
"Function {\nname: \"C.__init__\",\nsig: \"fn[[], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"C.fun\",\nsig: \"fn[[b=B], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"foo\",\nsig: \"fn[[a=A], none]\",\nvar_id: []\n}\n",
|
||||
"Function {\nname: \"ff\",\nsig: \"fn[[a=var3], var4]\",\nvar_id: [4]\n}\n",
|
||||
"Function {\nname: \"ff\",\nsig: \"fn[[a=var5], var6]\",\nvar_id: [6]\n}\n",
|
||||
]
|
||||
|
|
|
@ -162,7 +162,7 @@ fn test_simple_function_analyze(source: Vec<&str>, tys: Vec<&str>, names: Vec<&s
|
|||
|
||||
composer.start_analysis(true).unwrap();
|
||||
|
||||
for (i, (def, _)) in composer.definition_ast_list.iter().skip(composer.built_in_num).enumerate()
|
||||
for (i, (def, _)) in composer.definition_ast_list.iter().skip(composer.builtin_num).enumerate()
|
||||
{
|
||||
let def = &*def.read();
|
||||
if let TopLevelDef::Function { signature, name, .. } = def {
|
||||
|
@ -530,7 +530,7 @@ fn test_analyze(source: Vec<&str>, res: Vec<&str>) {
|
|||
} else {
|
||||
// skip 5 to skip primitives
|
||||
let mut res_vec: Vec<String> = Vec::new();
|
||||
for (def, _) in composer.definition_ast_list.iter().skip(composer.built_in_num) {
|
||||
for (def, _) in composer.definition_ast_list.iter().skip(composer.builtin_num) {
|
||||
let def = &*def.read();
|
||||
res_vec.push(format!("{}\n", def.to_string(composer.unifier.borrow_mut())));
|
||||
}
|
||||
|
@@ -715,7 +715,7 @@ fn test_inference(source: Vec<&str>, res: Vec<&str>) {
// skip 5 to skip primitives
let mut stringify_folder = TypeToStringFolder { unifier: &mut composer.unifier };
for (_i, (def, _)) in
composer.definition_ast_list.iter().skip(composer.built_in_num).enumerate()
composer.definition_ast_list.iter().skip(composer.builtin_num).enumerate()
{
let def = &*def.read();
@@ -1,7 +1,6 @@
use std::cell::RefCell;

use crate::typecheck::typedef::TypeVarMeta;

use super::*;

#[derive(Clone, Debug)]
@@ -49,64 +48,131 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
primitives: &PrimitiveStore,
expr: &ast::Expr<T>,
// the key stores the type_var of this topleveldef::class, we only need this field here
mut locked: HashMap<DefinitionId, Vec<Type>>,
locked: HashMap<DefinitionId, Vec<Type>>,
) -> Result<TypeAnnotation, String> {
match &expr.node {
ast::ExprKind::Name { id, .. } => {
if id == &"int32".into() {
Ok(TypeAnnotation::Primitive(primitives.int32))
} else if id == &"int64".into() {
Ok(TypeAnnotation::Primitive(primitives.int64))
} else if id == &"float".into() {
Ok(TypeAnnotation::Primitive(primitives.float))
} else if id == &"bool".into() {
Ok(TypeAnnotation::Primitive(primitives.bool))
} else if id == &"None".into() {
Ok(TypeAnnotation::Primitive(primitives.none))
} else if id == &"str".into() {
Ok(TypeAnnotation::Primitive(primitives.str))
} else if let Some(obj_id) = resolver.get_identifier_def(*id) {
let type_vars = {
let def_read = top_level_defs[obj_id.0].try_read();
if let Some(def_read) = def_read {
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
type_vars.clone()
} else {
return Err(format!(
"function cannot be used as a type (at {})",
expr.location
));
}
let name_handle = |id: &StrRef, unifier: &mut Unifier, locked: HashMap<DefinitionId, Vec<Type>>| {
if id == &"int32".into() {
Ok(TypeAnnotation::Primitive(primitives.int32))
} else if id == &"int64".into() {
Ok(TypeAnnotation::Primitive(primitives.int64))
} else if id == &"float".into() {
Ok(TypeAnnotation::Primitive(primitives.float))
} else if id == &"bool".into() {
Ok(TypeAnnotation::Primitive(primitives.bool))
} else if id == &"None".into() {
Ok(TypeAnnotation::Primitive(primitives.none))
} else if id == &"str".into() {
Ok(TypeAnnotation::Primitive(primitives.str))
} else if let Some(obj_id) = resolver.get_identifier_def(*id) {
let type_vars = {
let def_read = top_level_defs[obj_id.0].try_read();
if let Some(def_read) = def_read {
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
type_vars.clone()
} else {
locked.get(&obj_id).unwrap().clone()
return Err(format!(
"function cannot be used as a type (at {})",
expr.location
));
}
};
// check param number here
if !type_vars.is_empty() {
return Err(format!(
"expect {} type variable parameter but got 0 (at {})",
type_vars.len(),
expr.location,
));
}
Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] })
} else if let Some(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id) {
if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
Ok(TypeAnnotation::TypeVar(ty))
} else {
Err(format!(
"not a type variable identifier at {}",
expr.location
))
locked.get(&obj_id).unwrap().clone()
}
};
// check param number here
if !type_vars.is_empty() {
return Err(format!(
"expect {} type variable parameter but got 0 (at {})",
type_vars.len(),
expr.location,
));
}
Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] })
} else if let Some(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id) {
if let TypeEnum::TVar { .. } = unifier.get_ty(ty).as_ref() {
Ok(TypeAnnotation::TypeVar(ty))
} else {
Err(format!(
"name cannot be parsed as a type annotation at {}",
"not a type variable identifier at {}",
expr.location
))
}
} else {
Err(format!("name cannot be parsed as a type annotation at {}", expr.location))
}
};

let class_name_handle =
|id: &StrRef, slice: &ast::Expr<T>, unifier: &mut Unifier, mut locked: HashMap<DefinitionId, Vec<Type>>| {
if vec!["virtual".into(), "Generic".into(), "list".into(), "tuple".into()]
.contains(id)
{
return Err(format!("keywords cannot be class name (at {})", expr.location));
}
let obj_id = resolver
.get_identifier_def(*id)
.ok_or_else(|| "unknown class name".to_string())?;
let type_vars = {
let def_read = top_level_defs[obj_id.0].try_read();
if let Some(def_read) = def_read {
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
type_vars.clone()
} else {
unreachable!("must be class here")
}
} else {
locked.get(&obj_id).unwrap().clone()
}
};
// we do not check whether the application of type variables are compatible here
let param_type_infos = {
let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
elts.iter().collect_vec()
} else {
vec![slice]
};
if type_vars.len() != params_ast.len() {
return Err(format!(
"expect {} type parameters but got {} (at {})",
type_vars.len(),
params_ast.len(),
params_ast[0].location,
));
}
let result = params_ast
.iter()
.map(|x| {
parse_ast_to_type_annotation_kinds(
resolver,
top_level_defs,
unifier,
primitives,
x,
{
locked.insert(obj_id, type_vars.clone());
locked.clone()
},
)
})
.collect::<Result<Vec<_>, _>>()?;
// make sure the result do not contain any type vars
let no_type_var = result
.iter()
.all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
if no_type_var {
result
} else {
return Err(format!(
"application of type vars to generic class \
is not currently supported (at {})",
params_ast[0].location
));
}
};
Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
};
match &expr.node {
ast::ExprKind::Name { id, .. } => name_handle(id, unifier, locked),
// virtual
ast::ExprKind::Subscript { value, slice, .. }
if {
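The hunk above factors the `Name` handling of `parse_ast_to_type_annotation_kinds` into a `name_handle` closure and keeps threading a `locked` map through the recursion so that a class whose `TopLevelDef` is currently write-locked can still report its type variables. Below is a minimal, self-contained sketch of that fallback idea only; `DefinitionId` and `Type` are placeholder aliases and `type_vars_of` is an invented name, not the real nac3core API.

```rust
use std::collections::HashMap;

// Simplified stand-ins for nac3core's DefinitionId / Type.
type DefinitionId = usize;
type Type = u32;

// When a class definition is still being processed (its lock is held), its type
// variables cannot be read from the definition list. The caller instead passes
// them in through the `locked` map, keyed by definition id.
fn type_vars_of(
    def_type_vars: &HashMap<DefinitionId, Option<Vec<Type>>>, // None = lock held
    locked: &HashMap<DefinitionId, Vec<Type>>,
    id: DefinitionId,
) -> Vec<Type> {
    match def_type_vars.get(&id) {
        Some(Some(vars)) => vars.clone(), // definition readable: use it directly
        _ => locked.get(&id).cloned().unwrap_or_default(), // fall back to the caller-provided map
    }
}

fn main() {
    let mut defs = HashMap::new();
    defs.insert(0, Some(vec![1, 2])); // class A[T, V], readable
    defs.insert(1, None);             // class B, currently locked

    let mut locked = HashMap::new();
    locked.insert(1, vec![3]); // caller already knows B's type vars

    assert_eq!(type_vars_of(&defs, &locked, 0), vec![1, 2]);
    assert_eq!(type_vars_of(&defs, &locked, 1), vec![3]);
    println!("ok");
}
```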
@@ -176,74 +242,7 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
// custom class
ast::ExprKind::Subscript { value, slice, .. } => {
if let ast::ExprKind::Name { id, .. } = &value.node {
if vec!["virtual".into(), "Generic".into(), "list".into(), "tuple".into()]
.contains(id)
{
return Err(format!("keywords cannot be class name (at {})", value.location));
}
let obj_id = resolver
.get_identifier_def(*id)
.ok_or_else(|| "unknown class name".to_string())?;
let type_vars = {
let def_read = top_level_defs[obj_id.0].try_read();
if let Some(def_read) = def_read {
if let TopLevelDef::Class { type_vars, .. } = &*def_read {
type_vars.clone()
} else {
unreachable!("must be class here")
}
} else {
locked.get(&obj_id).unwrap().clone()
}
};
// we do not check whether the application of type variables are compatible here
let param_type_infos = {
let params_ast = if let ast::ExprKind::Tuple { elts, .. } = &slice.node {
elts.iter().collect_vec()
} else {
vec![slice.as_ref()]
};
if type_vars.len() != params_ast.len() {
return Err(format!(
"expect {} type parameters but got {} (at {})",
type_vars.len(),
params_ast.len(),
params_ast[0].location
));
}
let result = params_ast
.iter()
.map(|x| {
parse_ast_to_type_annotation_kinds(
resolver,
top_level_defs,
unifier,
primitives,
x,
{
locked.insert(obj_id, type_vars.clone());
locked.clone()
},
)
})
.collect::<Result<Vec<_>, _>>()?;

// make sure the result do not contain any type vars
let no_type_var = result
.iter()
.all(|x| get_type_var_contained_in_type_annotation(x).is_empty());
if no_type_var {
result
} else {
return Err(format!(
"application of type vars to generic class \
is not currently supported (at {})",
params_ast[0].location
));
}
};

Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos })
class_name_handle(id, slice, unifier, locked)
} else {
Err(format!("unsupported expression type for class name at {}", value.location))
}
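Both the old inline subscript handling above and the new `class_name_handle` closure apply the same two checks before accepting a subscripted class name: the number of supplied parameters must match the declared type variables, and none of the supplied parameters may itself contain a type variable. The snippet below is a standalone restatement of those checks with a toy annotation type; `Annotation`, `contains_type_var`, and `check_application` are invented for illustration and are not nac3core's `TypeAnnotation` API.

```rust
// Toy annotation tree standing in for nac3core's TypeAnnotation.
#[derive(Debug)]
enum Annotation {
    Primitive(&'static str),
    TypeVar(&'static str),
    CustomClass { params: Vec<Annotation> },
}

fn contains_type_var(a: &Annotation) -> bool {
    match a {
        Annotation::TypeVar(_) => true,
        Annotation::Primitive(_) => false,
        Annotation::CustomClass { params } => params.iter().any(contains_type_var),
    }
}

fn check_application(declared: usize, params: &[Annotation]) -> Result<(), String> {
    // Arity must match the class's declared type variables.
    if declared != params.len() {
        return Err(format!("expect {} type parameters but got {}", declared, params.len()));
    }
    // Applying a type variable to a generic class is rejected for now.
    if params.iter().any(contains_type_var) {
        return Err("application of type vars to generic class is not currently supported".into());
    }
    Ok(())
}

fn main() {
    let ok = [Annotation::Primitive("int32")];
    let bad = [Annotation::TypeVar("T")];
    assert!(check_application(1, &ok).is_ok());
    assert!(check_application(2, &ok).is_err());
    assert!(check_application(1, &bad).is_err());
}
```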
@@ -386,13 +385,7 @@ pub fn get_type_from_type_annotation_kinds(
/// But note that here we do not make a duplication of `T`, `V`, we direclty
/// use them as they are in the TopLevelDef::Class since those in the
/// TopLevelDef::Class.type_vars will be substitute later when seeing applications/instantiations
/// the Type of their fields and methods will also be subst when application/instantiation \
/// \
/// Note this implicit self type is different with seeing `A[T, V]` explicitly outside
/// the class def ast body, where it is a new instantiation of the generic class `A`,
/// but equivalent to seeing `A[T, V]` inside the class def body ast, where although we
/// create copies of `T` and `V`, we will find them out as occured type vars in the analyze_class()
/// and unify them with the class generic `T`, `V`
/// the Type of their fields and methods will also be subst when application/instantiation
pub fn make_self_type_annotation(type_vars: &[Type], object_id: DefinitionId) -> TypeAnnotation {
TypeAnnotation::CustomClass {
id: object_id,
@@ -167,7 +167,6 @@ impl<'a> Inferencer<'a> {
}
ExprKind::Constant { .. } => {}
_ => {
println!("{:?}", expr.node);
unimplemented!()
}
}
@@ -122,9 +122,36 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
},
}
}
ast::StmtKind::For { ref target, .. } => {
self.infer_pattern(target)?;
fold::fold_stmt(self, node)?
ast::StmtKind::For { target, iter, body, orelse, config_comment, type_comment } => {
self.infer_pattern(&target)?;
let target = self.fold_expr(*target)?;
let iter = self.fold_expr(*iter)?;
if self.unifier.unioned(iter.custom.unwrap(), self.primitives.range) {
self.unify(self.primitives.int32, target.custom.unwrap(), &target.location)?;
} else {
let list = self.unifier.add_ty(TypeEnum::TList { ty: target.custom.unwrap() });
self.unify(list, iter.custom.unwrap(), &iter.location)?;
}
let body = body
.into_iter()
.map(|b| self.fold_stmt(b))
.collect::<Result<Vec<_>, _>>()?;
let orelse = orelse
.into_iter()
.map(|o| self.fold_stmt(o))
.collect::<Result<Vec<_>, _>>()?;
Located {
location: node.location,
node: ast::StmtKind::For {
target: Box::new(target),
iter: Box::new(iter),
body,
orelse,
config_comment,
type_comment,
},
custom: None
}
}
ast::StmtKind::Assign { ref mut targets, ref config_comment, .. } => {
for target in targets.iter_mut() {
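The rewritten `For` arm above folds the target, the iterator, and both bodies itself and then constrains the target's type from the iterator's: iterating a `range` pins the target to `int32`, while any other iterator must unify with `list` of the target type. The snippet below restates just that decision with a toy type enum; `Ty` and `infer_for_target` are placeholders, not the nac3core `Unifier` API.

```rust
// Simplified model of the `for` inference rule: range yields int32, list[T]
// yields T, anything else is a type error.
#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Int32,
    Range,
    List(Box<Ty>),
}

fn infer_for_target(iter_ty: &Ty) -> Result<Ty, String> {
    match iter_ty {
        Ty::Range => Ok(Ty::Int32),
        Ty::List(elem) => Ok((**elem).clone()),
        other => Err(format!("cannot iterate over {:?}", other)),
    }
}

fn main() {
    assert_eq!(infer_for_target(&Ty::Range), Ok(Ty::Int32));
    assert_eq!(
        infer_for_target(&Ty::List(Box::new(Ty::Int32))),
        Ok(Ty::Int32)
    );
    println!("ok");
}
```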
@@ -201,14 +228,7 @@ impl<'a> fold::Fold<()> for Inferencer<'a> {
_ => fold::fold_stmt(self, node)?,
};
match &stmt.node {
ast::StmtKind::For { target, iter, .. } => {
if self.unifier.unioned(iter.custom.unwrap(), self.primitives.range) {
self.unify(self.primitives.int32, target.custom.unwrap(), &target.location)?;
} else {
let list = self.unifier.add_ty(TypeEnum::TList { ty: target.custom.unwrap() });
self.unify(list, iter.custom.unwrap(), &iter.location)?;
}
}
ast::StmtKind::For { .. } => {}
ast::StmtKind::If { test, .. } | ast::StmtKind::While { test, .. } => {
self.unify(test.custom.unwrap(), self.primitives.bool, &test.location)?;
}
@@ -855,7 +855,6 @@ impl Unifier {
}
}
_ => {
println!("{}", ty.get_type_name());
unreachable!("{} not expected", ty.get_type_name())
}
}
@@ -5,7 +5,7 @@
use std::iter::FromIterator;

use crate::ast;
use crate::ast::{self, Constant};
use crate::fstring::parse_located_fstring;
use crate::function::{ArgumentList, parse_args, parse_params};
use crate::error::LexicalError;
@@ -916,7 +916,17 @@ Factor: ast::Expr = {
<location:@L> <op:UnaryOp> <e:Factor> => ast::Expr {
location,
custom: (),
node: ast::ExprKind::UnaryOp { operand: Box::new(e), op }
node: {
match (&op, &e.node) {
(ast::Unaryop::USub, ast::ExprKind::Constant { value: Constant::Int(val), kind }) => {
ast::ExprKind::Constant {
value: Constant::Int(-val),
kind: kind.clone()
}
}
_ => ast::ExprKind::UnaryOp { operand: Box::new(e), op }
}
}
},
Power,
};
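The grammar change above folds a unary minus applied directly to an integer literal into a negative constant at parse time instead of emitting a `UnaryOp` node. Below is a self-contained sketch of the same folding on a toy AST; `Expr` and `fold_unary_minus` are invented for illustration and are not the nac3parser grammar action itself.

```rust
// Toy expression tree: either an integer literal or a unary negation.
#[derive(Debug, PartialEq)]
enum Expr {
    Int(i64),
    Neg(Box<Expr>),
}

fn fold_unary_minus(e: Expr) -> Expr {
    match e {
        Expr::Neg(inner) => match *inner {
            // A minus applied directly to a literal collapses into the literal.
            Expr::Int(v) => Expr::Int(-v),
            // Otherwise keep the negation but fold inside it.
            other => Expr::Neg(Box::new(fold_unary_minus(other))),
        },
        other => other,
    }
}

fn main() {
    assert_eq!(fold_unary_minus(Expr::Neg(Box::new(Expr::Int(5)))), Expr::Int(-5));
    println!("ok");
}
```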
@@ -5,7 +5,12 @@ authors = ["M-Labs"]
edition = "2018"

[dependencies]
inkwell = { git = "https://github.com/TheDan64/inkwell", branch = "master", features = ["llvm12-0"] }
parking_lot = "0.11.1"
nac3parser = { path = "../nac3parser" }
nac3core = { path = "../nac3core" }

[dependencies.inkwell]
git = "https://github.com/TheDan64/inkwell"
branch = "master"
default-features = false
features = ["llvm12-0", "target-x86", "target-arm", "target-riscv", "no-libffi-linking"]
@@ -1,5 +1,9 @@
@extern
def output_int(x: int32):
def output_int32(x: int32):
...

@extern
def output_int64(x: int64):
...
@@ -27,10 +31,10 @@ class B:
def run() -> int32:
a = A(10)
output_int(a.a)
output_int32(a.a)

a = A(20)
output_int(a.a)
output_int(a.get_a())
output_int(a.get_b().b)
output_int32(a.a)
output_int32(a.get_a())
output_int32(a.get_b().b)
return 0
@@ -1,10 +1,14 @@
#include <stdio.h>
#include <string.h>

void output_int(int x) {
void output_int32(int x) {
printf("%d\n", x);
}

void output_int64(long x) {
printf("%ld\n", x);
}

void output_asciiart(int x) {
static char chars[] = " .,-:;i+hHM$*#@ ";
if(x < 0) {
@@ -3,20 +3,22 @@ use inkwell::{
targets::*,
OptimizationLevel,
};
use nac3core::typecheck::type_inferencer::PrimitiveStore;
use nac3parser::{ast::{Expr, ExprKind, StmtKind}, parser};
use std::{borrow::Borrow, env};
use std::fs;
use std::{collections::HashMap, path::Path, sync::Arc, time::SystemTime};
use std::{borrow::Borrow, collections::HashMap, env, fs, path::Path, sync::Arc, time::SystemTime};
use parking_lot::RwLock;

use nac3parser::{ast::{Expr, ExprKind, StmtKind}, parser};
use nac3core::{
codegen::{
concrete_type::ConcreteTypeStore, CodeGenTask, DefaultCodeGenerator, WithCall,
WorkerRegistry,
},
symbol_resolver::SymbolResolver,
toplevel::{composer::TopLevelComposer, TopLevelDef, helper::parse_parameter_default_value},
typecheck::typedef::FunSignature,
toplevel::{
composer::TopLevelComposer,
TopLevelDef, helper::parse_parameter_default_value,
type_annotation::*,
},
typecheck::{type_inferencer::PrimitiveStore, typedef::{Type, Unifier, FunSignature}}
};

mod basic_symbol_resolver;
@@ -42,7 +44,10 @@ fn main() {
};

let primitive: PrimitiveStore = TopLevelComposer::make_primitives().0;
let (mut composer, builtins_def, builtins_ty) = TopLevelComposer::new(vec![]);
let (mut composer, builtins_def, builtins_ty) = TopLevelComposer::new(
vec![],
Default::default()
);

let internal_resolver: Arc<ResolverInternal> = ResolverInternal {
id_to_type: builtins_ty.into(),
@@ -68,25 +73,84 @@ fn main() {
for stmt in parser_result.into_iter() {
if let StmtKind::Assign { targets, value, .. } = &stmt.node {
fn handle_typevar_definition(
var: &Expr,
resolver: &(dyn SymbolResolver + Send + Sync),
def_list: &[Arc<RwLock<TopLevelDef>>],
unifier: &mut Unifier,
primitives: &PrimitiveStore,
) -> Result<Type, String> {
if let ExprKind::Call { func, args, .. } = &var.node {
if matches!(&func.node, ExprKind::Name { id, .. } if id == &"TypeVar".into()) {
let constraints = args
.iter()
.skip(1)
.map(|x| -> Result<Type, String> {
let ty = parse_ast_to_type_annotation_kinds(
resolver,
def_list,
unifier,
primitives,
x,
Default::default(),
)?;
get_type_from_type_annotation_kinds(def_list, unifier, primitives, &ty)
})
.collect::<Result<Vec<_>, _>>()?;
Ok(unifier.get_fresh_var_with_range(&constraints).0)
} else {
Err(format!("expression {:?} cannot be handled as a TypeVar in global scope", var))
}
} else {
Err(format!("expression {:?} cannot be handled as a TypeVar in global scope", var))
}
}

fn handle_assignment_pattern(
targets: &[Expr],
value: &Expr,
resolver: &(dyn SymbolResolver + Send + Sync),
internal_resolver: &ResolverInternal,
def_list: &[Arc<RwLock<TopLevelDef>>],
unifier: &mut Unifier,
primitives: &PrimitiveStore,
) -> Result<(), String> {
if targets.len() == 1 {
match &targets[0].node {
ExprKind::Name { id, .. } => {
let val = parse_parameter_default_value(value.borrow(), resolver)?;
internal_resolver.add_module_global(*id, val);
Ok(())
if let Ok(var) = handle_typevar_definition(
value.borrow(),
resolver,
def_list,
unifier,
primitives,
) {
internal_resolver.add_id_type(*id, var);
Ok(())
} else if let Ok(val) = parse_parameter_default_value(value.borrow(), resolver) {
internal_resolver.add_module_global(*id, val);
Ok(())
} else {
Err(format!("fails to evaluate this expression `{:?}` as a constant or TypeVar at {}",
targets[0].node,
targets[0].location,
))
}
}
ExprKind::List { elts, .. }
| ExprKind::Tuple { elts, .. } => {
handle_assignment_pattern(elts, value, resolver, internal_resolver)?;
handle_assignment_pattern(
elts,
value,
resolver,
internal_resolver,
def_list,
unifier,
primitives
)?;
Ok(())
}
_ => unreachable!("cannot be assigned")
_ => Err(format!("assignment to {:?} is not supported at {}", targets[0], targets[0].location))
}
} else {
match &value.node {
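In the hunk above, `handle_typevar_definition` accepts a right-hand side of the form `TypeVar(...)`, parses every argument after the name as a type annotation, and creates a fresh type variable constrained to those types; `handle_assignment_pattern` now tries this first and only then falls back to treating the value as a constant. The sketch below shows only the shape-recognition step on a toy AST; the `Expr` type and the `typevar_constraints` helper are invented for illustration and are not nac3parser's AST.

```rust
// Toy AST: a name, a string literal, or a call.
#[derive(Debug)]
enum Expr {
    Name(String),
    Str(String),
    Call { func: Box<Expr>, args: Vec<Expr> },
}

fn typevar_constraints(value: &Expr) -> Result<Vec<String>, String> {
    match value {
        Expr::Call { func, args } => match func.as_ref() {
            Expr::Name(id) if id == "TypeVar" => Ok(args
                .iter()
                .skip(1) // first argument is the type variable's name
                .filter_map(|a| match a {
                    Expr::Name(n) => Some(n.clone()),
                    _ => None,
                })
                .collect()),
            _ => Err("not a TypeVar(...) call".into()),
        },
        _ => Err("not a call expression".into()),
    }
}

fn main() {
    // Corresponds to a Python line like `T = TypeVar("T", int32, float)`.
    let value = Expr::Call {
        func: Box::new(Expr::Name("TypeVar".into())),
        args: vec![
            Expr::Str("T".into()),
            Expr::Name("int32".into()),
            Expr::Name("float".into()),
        ],
    };
    assert_eq!(typevar_constraints(&value).unwrap(), vec!["int32", "float"]);
    println!("ok");
}
```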
@@ -105,7 +169,10 @@ fn main() {
std::slice::from_ref(tar),
val,
resolver,
internal_resolver
internal_resolver,
def_list,
unifier,
primitives
)?;
}
Ok(())
@@ -115,7 +182,19 @@ fn main() {
}
}
}
if let Err(err) = handle_assignment_pattern(targets, value, resolver.as_ref(), internal_resolver.as_ref()) {

let def_list = composer.extract_def_list();
let unifier = &mut composer.unifier;
let primitives = &composer.primitives_ty;
if let Err(err) = handle_assignment_pattern(
targets,
value,
resolver.as_ref(),
internal_resolver.as_ref(),
&def_list,
unifier,
primitives,
) {
eprintln!("{}", err);
return;
}
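At the call site above, the standalone frontend first binds `composer.extract_def_list()`, `&mut composer.unifier`, and `&composer.primitives_ty` to locals before invoking `handle_assignment_pattern`. One plausible reason (an assumption, not stated in the diff) is that field-level borrows keep the mutable borrow of the unifier disjoint from the shared borrow of the primitive store; the toy example below demonstrates that borrow pattern with placeholder types, not the real composer.

```rust
// Placeholder composer: the real TopLevelComposer fields are different types.
struct Composer {
    unifier: Vec<u32>,  // stand-in for Unifier
    primitives_ty: u32, // stand-in for PrimitiveStore
}

fn use_parts(unifier: &mut Vec<u32>, primitives: &u32) {
    unifier.push(*primitives);
}

fn main() {
    let mut composer = Composer { unifier: vec![], primitives_ty: 7 };
    // Field-level borrows: `&mut composer.unifier` and `&composer.primitives_ty`
    // can coexist because they touch different fields, which a single
    // `&mut composer` argument would not allow.
    let unifier = &mut composer.unifier;
    let primitives = &composer.primitives_ty;
    use_parts(unifier, primitives);
    assert_eq!(composer.unifier, vec![7]);
}
```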