Compare commits


37 Commits
master ... wfvm

Author SHA1 Message Date
Astro 4419818631 artiq-full: fix vivado.nix path 2020-06-17 22:25:42 +02:00
Astro 4bfe5420d3 artiq-board: let artiq-full pass vivado 2020-06-17 22:21:02 +02:00
Astro b876be5ef9 artiq-board: move inputs outside inner function 2020-06-17 22:16:58 +02:00
Astro f5429f28ae add more tracing 2020-06-17 22:10:28 +02:00
Astro 6ba6edd6f2 Revert "artiq-full: disable other jobs"
This reverts commit dcad2654a3.
2020-06-17 22:09:30 +02:00
Astro 5496b3fa05 add trace 2020-06-17 22:08:33 +02:00
Astro c1490f2c68 add some tracing 2020-06-17 21:11:22 +02:00
Astro dcad2654a3 artiq-full: disable other jobs 2020-06-17 19:08:20 +02:00
Astro 59d6df933e point to nix-scripts.git wfvm branch 2020-06-17 16:58:53 +02:00
Astro f9d5fd0520 windows: disable kvm
testing under kvm...
2020-06-17 16:58:53 +02:00
Astro 2c0015979c point to my nix-scripts.git 2020-06-17 16:58:53 +02:00
adisbladis 6edecf8671 Add a sleep after install script
Sometimes files are not fully synced to disk despite the install
script exiting with success status.
2020-06-17 16:58:53 +02:00
adisbladis 1f0d703646 Remove redhat cert (leftover from WHQL) 2020-06-17 16:58:53 +02:00
adisbladis e320bd6181 Remove nuget 2020-06-17 16:58:53 +02:00
adisbladis 73e247e5b9 Dead code removal 2020-06-17 16:58:53 +02:00
adisbladis 28da250925 Add support for incremental install of packages 2020-06-17 16:58:53 +02:00
adisbladis 7b4cd0b944 Split Qemu parameters
So we can re-use params for incremental install
2020-06-17 16:58:53 +02:00
adisbladis 6bbbd41ece Remove anaconda bundle
This one will be installed in a separate step incrementally
2020-06-17 16:58:53 +02:00
adisbladis 3e7e6941b8 Add a baseRtc option to set the VM clock 2020-06-17 16:58:53 +02:00
adisbladis 9e6775fe8d Override p7zip to fix build with nixpkgs >20.03 2020-06-17 16:58:53 +02:00
adisbladis c147ddee56 Document pure/impure mode 2020-06-17 16:58:53 +02:00
adisbladis 34404ddf6b Switch impure mode to parameter 2020-06-17 16:58:53 +02:00
Sebastien Bourdeauducq 895359eade windows: remove outdated installation instructions 2020-06-17 16:58:53 +02:00
adisbladis 7e82318fd1 Remove declarative SSH keys
Windows changes the naming of home directories if it encounters an
already present homedir, so this is not working as intended.
2020-06-17 16:58:53 +02:00
adisbladis 23e9666550 Fix conda base package
I'm not sure whether to include this in the default image so it's
uncommented for now
2020-06-17 16:58:53 +02:00
adisbladis b410bd6b35 windows: Don't use deprecated method of OpenSSH server installation
Add the OpenSSH cab file extracted from the Windows FOD ISO
2020-06-17 16:58:53 +02:00
adisbladis 51f93e5852 windows: Fix build termination 2020-06-17 16:58:53 +02:00
adisbladis 6b4e6548e5 windows: Fix stupid quoting issues in autounattend XML
The windows XML parser is shit and bails out extremely late, making
debugging this a 1+ hour ordeal for every iteration.

Let's just write out a powershell script and be done with it.
2020-06-17 16:58:53 +02:00
adisbladis d90a2716bd windows: Fix quoting issues in XML 2020-06-17 16:58:53 +02:00
adisbladis a1e5d01f80 windows: Don't run in impure mode 2020-06-17 16:58:53 +02:00
adisbladis 738ce24d9c windows: Adapt tests to new build infra 2020-06-17 16:58:53 +02:00
adisbladis 9f3e515c01 windows: Add Anaconda to base windows install 2020-06-17 16:58:53 +02:00
adisbladis fbc4530388 windows: Fix extra quote 2020-06-17 16:58:53 +02:00
adisbladis 70562bcdfd windows: Remove impureMode from autounattend
It was always a hack to work around the pure setup not being completed.
2020-06-17 16:58:53 +02:00
adisbladis fc3b685167 windows: Remove virtiowin & autohotkey
Let's consider these automation attempts failures, since virtio-win
cannot reasonably be automated.
The Windows WHQL checks for drivers cannot be removed.
2020-06-17 16:58:53 +02:00
adisbladis 3d0375c218 windows: Fix SSH key setup 2020-06-17 16:58:53 +02:00
adisbladis a5d93aea35 windows: Add automated declarative windows install 2020-06-17 16:58:51 +02:00
159 changed files with 21043 additions and 4538 deletions

View File

@ -1,98 +0,0 @@
# Install Vivado in /opt and add to /etc/nixos/configuration.nix:
# nix.sandboxPaths = ["/opt"];
{ pkgs ? import <nixpkgs> {}
, artiq-fast
}:
let
artiqSrc = import (artiq-fast + "/pkgs/artiq-src.nix") { fetchgit = pkgs.fetchgit; };
artiqpkgs = import artiq-fast { inherit pkgs; };
pythonEnv = pkgs.python3.withPackages (ps: with ps; [
jinja2 jsonschema numpy artiqpkgs.migen artiqpkgs.microscope artiqpkgs.misoc artiqpkgs.jesd204b artiqpkgs.artiq
]);
fetchcargo = import (artiq-fast + "/fetchcargo-legacy.nix") {
inherit (pkgs) stdenv lib cacert git;
cargo = artiqpkgs.cargo-legacy;
cargo-vendor = artiqpkgs.cargo-vendor-legacy;
};
cargoDeps = fetchcargo rec {
name = "artiq-firmware-cargo-deps";
src = "${artiqSrc}/artiq/firmware";
sha256 = import (artiqSrc + "/artiq/firmware/cargosha256.nix");
};
cargoVendored = pkgs.stdenv.mkDerivation {
name = "artiq-firmware-cargo-vendored";
src = cargoDeps;
phases = [ "unpackPhase" "installPhase" ];
installPhase =
''
mkdir -p $out/registry
cat << EOF > $out/config
[source.crates-io]
registry = "https://github.com/rust-lang/crates.io-index"
replace-with = "vendored-sources"
[source."https://github.com/m-labs/libfringe"]
git = "https://github.com/m-labs/libfringe"
rev = "b8a6d8f"
replace-with = "vendored-sources"
[source.vendored-sources]
directory = "$out/registry"
EOF
cp -R * $out/registry
'';
};
in
{ target
, variant
, src ? null
, buildCommand ? "python -m artiq.gateware.targets.${target} -V ${variant}"
, extraInstallCommands ? ""
, ...
}:
let
name = "artiq-board-${target}-${variant}-${artiqpkgs.artiq.version}-xxx";
installPath = "${pkgs.python3Packages.python.sitePackages}/artiq/board-support/${target}-${variant}";
in
# Board packages are Python modules so that they get added to the ARTIQ Python
# environment, and artiq_flash finds them.
pkgs.stdenv.mkDerivation {
name = "artiq-board-${target}-${variant}-${artiqpkgs.artiq.version}";
inherit src;
phases = [ "buildPhase" "installPhase" ];
nativeBuildInputs = [
pkgs.gnumake pkgs.which pythonEnv
artiqpkgs.cargo-legacy
artiqpkgs.rustc-legacy
artiqpkgs.binutils-or1k
artiqpkgs.llvm-or1k
];
buildInputs = [ pythonEnv ];
buildPhase =
''
export CARGO_HOME=${cargoVendored}
export TARGET_AR=or1k-linux-ar
${buildCommand} --no-compile-gateware --gateware-identifier-str=unprogrammed
'';
installPhase =
''
mkdir -p $out
cp -ar artiq_${target}/${variant}/gateware $out
TARGET_DIR=$out/${pkgs.python3Packages.python.sitePackages}/artiq/board-support/${target}-${variant}
mkdir -p $TARGET_DIR
if [ -e artiq_${target}/${variant}/software/bootloader/bootloader.bin ]
then cp artiq_${target}/${variant}/software/bootloader/bootloader.bin $TARGET_DIR
fi
if [ -e artiq_${target}/${variant}/software/runtime ]
then cp artiq_${target}/${variant}/software/runtime/runtime.{elf,fbi} $TARGET_DIR
else cp artiq_${target}/${variant}/software/satman/satman.{elf,fbi} $TARGET_DIR
fi
${extraInstallCommands}
'';
# don't mangle ELF files as they are not for NixOS
dontFixup = true;
}
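
For orientation, a minimal sketch of how this (legacy) builder is meant to be invoked, mirroring the call sites in the generator and default.nix elsewhere in this diff; <artiq-fast> stands for an artiq-fast checkout or channel:

let
  pkgs = import <nixpkgs> {};
  # The outer call fixes pkgs and the ARTIQ sources; the result is the
  # per-board function defined above.
  artiq-board = import ./artiq-board.nix {
    inherit pkgs;
    artiq-fast = <artiq-fast>;
  };
in
  # The inner call picks the gateware target and variant.
  artiq-board {
    target = "kasli";
    variant = "tester";
  }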

View File

@ -1,54 +0,0 @@
{ pkgs ? import <nixpkgs> {}
, artiq-fast ? <artiq-fast>
}:
let
sinaraSystemsSrc = <sinaraSystemsSrc>;
generatedNix = pkgs.runCommand "generated-nix" { buildInputs = [ pkgs.nix pkgs.git ]; }
''
mkdir $out
cp ${./artiq-board.nix} $out/artiq-board.nix
cp ${../artiq-full/artiq-targets.nix} $out/artiq-targets.nix
cp -a ${artiq-fast} $out/fast
REV=`git --git-dir ${sinaraSystemsSrc}/.git rev-parse HEAD`
echo -n $REV > $out/sinara-rev.txt
SINARA_SRC_CLEAN=`mktemp -d`
cp -a ${sinaraSystemsSrc}/. $SINARA_SRC_CLEAN
chmod -R 755 $SINARA_SRC_CLEAN/.git
chmod 755 $SINARA_SRC_CLEAN
rm -rf $SINARA_SRC_CLEAN/.git
HASH=`nix-hash --type sha256 --base32 $SINARA_SRC_CLEAN`
echo -n $HASH > $out/sinara-hash.txt
cat > $out/default.nix << EOF
{ pkgs ? import <nixpkgs> {}
}:
let
sinaraSystemsSrc = pkgs.fetchgit {
url = "https://git.m-labs.hk/M-Labs/sinara-systems-legacy.git";
rev = "$REV";
sha256 = "$HASH";
};
artiq-fast = import ./fast { inherit pkgs; };
artiq-board = import ./artiq-board.nix {
inherit pkgs;
artiq-fast = ./fast;
};
in
builtins.mapAttrs (_: conf: pkgs.lib.hydraJob (artiq-board conf)) (
import ./artiq-targets.nix {
inherit pkgs sinaraSystemsSrc;
artiqVersion = artiq-fast.artiq.version;
}
)
EOF
'';
artiq-board-generated = import generatedNix {
inherit pkgs;
};
in
artiq-board-generated // {
generated-nix = pkgs.lib.hydraJob generatedNix;
}
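
The pattern above, where a runCommand writes a Nix expression that is then imported during evaluation, is import-from-derivation (IFD). A stripped-down, self-contained sketch of the same mechanism (the generated package is arbitrary):

let
  pkgs = import <nixpkgs> {};
  generated = pkgs.runCommand "generated-nix" {} ''
    mkdir $out
    echo '{ pkgs }: pkgs.hello' > $out/default.nix
  '';
in
  # Importing the output forces the derivation to be built at eval time.
  import "${generated}/default.nix" { inherit pkgs; }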

View File

@ -1,5 +1,5 @@
let
pkgs = import <nixpkgs> { overlays = [ (import ./artiq-fast/mozilla-overlay.nix) ]; };
pkgs = import <nixpkgs> {};
artiqSrc = <artiqSrc>;
generatedNix = pkgs.runCommand "generated-nix" { buildInputs = [ pkgs.nix pkgs.git ]; }
# keep in sync with artiq-fast/pkgs/artiq-version.nix
@ -23,7 +23,7 @@ let
cat > $out/pkgs/artiq-src.nix << EOF
{ fetchgit }:
fetchgit {
url = "https://github.com/m-labs/artiq.git";
url = "git://github.com/m-labs/artiq.git";
rev = "$REV";
sha256 = "$HASH";
}
@ -31,11 +31,20 @@ let
echo "{ stdenv, git, fetchgit }: \"$MAJOR_VERSION.$COMMIT_COUNT.`cut -c1-8 <<< $REV`$SUFFIX\"" > $out/pkgs/artiq-version.nix
echo "{ stdenv, git, fetchgit }: \"$TIMESTAMP\"" > $out/pkgs/artiq-timestamp.nix
'';
generateTestOkHash = pkgs.runCommand "generate-test-ok-hash" { buildInputs = [ pkgs.nix ]; }
''
TMPDIR=`mktemp -d`
cp ${generatedNix}/pkgs/artiq-version.nix $TMPDIR/passed
HASH=`nix-hash --type sha256 --base32 $TMPDIR`
echo \"$HASH\" > $out
'';
artiqpkgs = import "${generatedNix}/default.nix" { inherit pkgs; };
artiqVersion = import "${generatedNix}/pkgs/artiq-version.nix" (with pkgs; { inherit stdenv fetchgit git; });
windowsRunner = overrides:
import "${generatedNix}/windows/run-test.nix" ({
inherit pkgs artiqpkgs;
inherit pkgs;
sipycoPkg = artiqpkgs.conda-sipyco;
artiqPkg = artiqpkgs.conda-artiq;
} // overrides);
jobs = (builtins.mapAttrs (key: value: pkgs.lib.hydraJob value) artiqpkgs);
in
@ -49,42 +58,51 @@ in
windows-no-hardware-tests = pkgs.stdenv.mkDerivation {
name = "windows-no-hardware-tests";
buildInputs = [ (windowsRunner {}) ];
phases = [ "buildPhase" ];
buildPhase = ''
${windowsRunner { testCommand = "python -m unittest discover -v sipyco.test && python -m unittest discover -v artiq.test"; }}/bin/wfvm-run-windows-tests
${windowsRunner {}}/bin/run.sh
touch $out
'';
};
# HACK: Abuse fixed-output derivations to escape the sandbox and run the hardware
# unit tests, all integrated in the Hydra interface.
# One major downside of this hack is the tests are only run when generateTestOkHash
# changes, i.e. when the ARTIQ version changes (and not the dependencies).
# Impure derivations, when they land in Nix/Hydra, should improve the situation.
extended-tests = pkgs.stdenv.mkDerivation {
name = "extended-tests";
__networked = true; # compatibility with old patched Nix
# breaks hydra, https://github.com/NixOS/hydra/issues/1216
#__impure = true; # Nix 2.8+
outputHashAlgo = "sha256";
outputHashMode = "recursive";
outputHash = import generateTestOkHash;
__hydraRetry = false;
buildInputs = [
(pkgs.python3.withPackages(ps: [ ps.paramiko artiqpkgs.artiq artiqpkgs.artiq-board-kc705-nist_clock ]))
artiqpkgs.binutils-or1k
artiqpkgs.openocd
pkgs.iputils
pkgs.openssh
] ++ (if (pkgs.lib.versionAtLeast artiqVersion "7.0") then [ pkgs.llvm_11 pkgs.lld_11 ] else [ artiqpkgs.binutils-or1k ]);
];
phases = [ "buildPhase" ];
buildPhase =
''
export HOME=`mktemp -d`
mkdir $HOME/.ssh
cp /opt/hydra_id_ed25519 $HOME/.ssh/id_ed25519
cp /opt/hydra_id_ed25519.pub $HOME/.ssh/id_ed25519.pub
echo "rpi-1 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIACtBFDVBYoAE4fpJCTANZSE0bcVpTR3uvfNvb80C4i5" > $HOME/.ssh/known_hosts
chmod 600 $HOME/.ssh/id_ed25519
cp /opt/hydra_id_rsa $HOME/.ssh/id_rsa
cp /opt/hydra_id_rsa.pub $HOME/.ssh/id_rsa.pub
echo "rpi-1,192.168.1.188 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMc7waNkP2HjL5Eo94evoxJhC8CbYj4i2n1THe5TPIR3" > $HOME/.ssh/known_hosts
chmod 600 $HOME/.ssh/id_rsa
LOCKCTL=$(mktemp -d)
mkfifo $LOCKCTL/lockctl
cat $LOCKCTL/lockctl | ${pkgs.openssh}/bin/ssh \
-i $HOME/.ssh/id_ed25519 \
-i $HOME/.ssh/id_rsa \
-o UserKnownHostsFile=$HOME/.ssh/known_hosts \
rpi-1 \
'mkdir -p /tmp/board_lock && flock /tmp/board_lock/kc705-1 -c "echo Ok; cat"' \
sb@rpi-1 \
'flock /tmp/board_lock-kc705-1 -c "echo Ok; cat"' \
| (
# End remote flock via FIFO
atexit_unlock() {
@ -97,15 +115,18 @@ in
artiq_flash -t kc705 -H rpi-1
sleep 15
# ping: socket: Operation not permitted
#ping kc705-1 -c10 -w30
export ARTIQ_ROOT=`python -c "import artiq; print(artiq.__path__[0])"`/examples/kc705_nist_clock
export ARTIQ_LOW_LATENCY=1
python -m unittest discover -v artiq.test.coredevice
${windowsRunner { testCommand = "set ARTIQ_ROOT=%cd%\\Anaconda3\\envs\\artiq-env\\Lib\\site-packages\\artiq\\examples\\kc705_nist_clock&& python -m unittest discover -v artiq.test.coredevice"; }}/bin/wfvm-run-windows-tests
${windowsRunner { testCommand = "set ARTIQ_ROOT=%cd%\\anaconda\\envs\\artiq-env\\Lib\\site-packages\\artiq\\examples\\kc705_nist_clock&&python -m unittest discover -v artiq.test.coredevice"; }}/bin/run.sh
)
touch $out
mkdir $out
cp ${generatedNix}/pkgs/artiq-version.nix $out/passed
'';
};
}
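
The extended-tests job above leans on the fact that fixed-output derivations are granted network access inside the Nix sandbox. A bare-bones sketch of that mechanism, separate from the real jobset (lib.fakeSha256 is a placeholder; the first build fails with a hash mismatch that reports the value to pin, which is what generateTestOkHash precomputes above):

pkgs.stdenv.mkDerivation {
  name = "sandbox-escape-sketch";
  # Declaring an output hash turns this into a fixed-output derivation,
  # so the builder may reach the network (and, here, external hardware).
  outputHashAlgo = "sha256";
  outputHashMode = "recursive";
  outputHash = pkgs.lib.fakeSha256;
  phases = [ "buildPhase" ];
  buildPhase = ''
    mkdir $out
    echo ok > $out/passed
  '';
}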

View File

@ -2,27 +2,25 @@
# nix.sandboxPaths = ["/opt"];
{ pkgs
, rustPlatform
, vivado ? import ./vivado.nix { inherit pkgs; }
, vivado ? (builtins.trace "vivado" (import ./vivado.nix { inherit pkgs; }))
}:
let
artiqSrc = import ./pkgs/artiq-src.nix { fetchgit = pkgs.fetchgit; };
artiqpkgs = import ./default.nix { inherit pkgs; };
fetchcargo-legacy = import ./fetchcargo-legacy.nix {
inherit (pkgs) stdenv lib cacert git;
cargo = artiqpkgs.cargo-legacy;
cargo-vendor = artiqpkgs.cargo-vendor-legacy;
artiqpkgs = builtins.trace "artiqpkgs" (import ./default.nix { inherit pkgs; });
fetchcargo = import ./fetchcargo.nix {
inherit (pkgs) stdenv cacert git;
inherit (artiqpkgs) cargo cargo-vendor;
};
cargoDeps-legacy = fetchcargo-legacy rec {
cargoDeps = fetchcargo rec {
name = "artiq-firmware-cargo-deps";
src = "${artiqSrc}/artiq/firmware";
sha256 = (import "${artiqSrc}/artiq/firmware/cargosha256.nix");
};
cargoVendored-legacy = pkgs.stdenv.mkDerivation {
cargoVendored = pkgs.stdenv.mkDerivation {
name = "artiq-firmware-cargo-vendored";
src = cargoDeps-legacy;
src = cargoDeps;
phases = [ "unpackPhase" "installPhase" ];
installPhase =
''
@ -44,82 +42,44 @@ let
'';
};
cargoDeps = rustPlatform.fetchCargoTarball {
name = "artiq-firmware-cargo-deps";
src = "${artiqSrc}/artiq/firmware";
sha256 = "sha256-YyycMsDzR+JRcMZJd6A/CRi2J9nKmaWY/KXUnAQaZ00=";
};
cargo-xbuild = rustPlatform.buildRustPackage rec {
pname = "cargo-xbuild";
version = "0.6.5";
src = pkgs.fetchFromGitHub {
owner = "rust-osdev";
repo = pname;
rev = "v${version}";
sha256 = "18djvygq9v8rmfchvi2hfj0i6fhn36m716vqndqnj56fiqviwxvf";
};
cargoSha256 = "13sj9j9kl6js75h9xq0yidxy63vixxm9q3f8jil6ymarml5wkhx8";
};
artiq7 = pkgs.lib.strings.versionAtLeast artiqpkgs.artiq.version "7.0";
in
{ target
, variant
, src ? null
, buildCommand ? "python -m artiq.gateware.targets.${target} -V ${variant}"
, extraInstallCommands ? ""
, ... }:
, extraInstallCommands ? ""}:
let
# Board packages are Python modules so that they get added to the ARTIQ Python
# environment, and artiq_flash finds them.
pkgs.python3Packages.toPythonModule (pkgs.stdenv.mkDerivation rec {
in pkgs.python3Packages.toPythonModule (pkgs.stdenv.mkDerivation rec {
name = "artiq-board-${target}-${variant}-${version}";
version = import ./pkgs/artiq-version.nix (with pkgs; { inherit stdenv fetchgit git; });
inherit src;
phases = [ "buildPhase" "checkPhase" "installPhase" ];
nativeBuildInputs = [ vivado pkgs.gnumake ]
++ (if artiq7
then [
rustPlatform.rust.rustc
rustPlatform.rust.cargo
pkgs.llvmPackages_11.clang-unwrapped
pkgs.llvm_11
pkgs.lld_11
rustPlatform.cargoSetupHook
cargo-xbuild
] else [
artiqpkgs.cargo-legacy
artiqpkgs.rustc-legacy
artiqpkgs.binutils-or1k
artiqpkgs.llvm-or1k
]);
buildInputs = [ (pkgs.python3.withPackages(ps: with ps; [ artiqpkgs.migen artiqpkgs.microscope artiqpkgs.misoc artiqpkgs.jesd204b artiqpkgs.artiq ] ++ (pkgs.lib.optional artiq7 jsonschema))) ];
buildPhase = if artiq7
then
''
ARTIQ_PATH=`python -c "import artiq; print(artiq.__path__[0])"`
ln -s $ARTIQ_PATH/firmware/Cargo.lock .
cargoDeps=${cargoDeps}
cargoSetupPostUnpackHook
cargoSetupPostPatchHook
export TARGET_AR=llvm-ar
${buildCommand}
''
else
''
export CARGO_HOME=${cargoVendored-legacy}
export TARGET_AR=or1k-linux-ar
${buildCommand}
'';
# temporarily disabled because there is currently always at least one Kasli bitstream
# that fails timing and blocks the conda channel.
doCheck = artiq7;
phases = [ "buildPhase" "installCheckPhase" "installPhase" ];
buildInputs = [
vivado
pkgs.gnumake
(pkgs.python3.withPackages(ps: with ps; [ jinja2 numpy artiqpkgs.migen artiqpkgs.microscope artiqpkgs.misoc artiqpkgs.jesd204b artiqpkgs.artiq ]))
artiqpkgs.cargo
artiqpkgs.rustc
artiqpkgs.binutils-or1k
artiqpkgs.llvm-or1k
];
buildPhase =
''
export CARGO_HOME=${cargoVendored}
export TARGET_AR=or1k-linux-ar
${buildCommand}
'';
checkPhase = ''
# Search for PCREs in the Vivado output to check for errors
check_log() {
grep -Pe "$1" artiq_${target}/${variant}/gateware/vivado.log && exit 1 || true
set +e
grep -Pe "$1" artiq_${target}/${variant}/gateware/vivado.log
FOUND=$?
set -e
if [ $FOUND != 1 ]; then
exit 1
fi
}
check_log "\d+ constraint not met\."
check_log "Timing constraints are not met\."
@ -138,6 +98,4 @@ pkgs.python3Packages.toPythonModule (pkgs.stdenv.mkDerivation rec {
fi
${extraInstallCommands}
'';
# don't mangle ELF files as they are not for NixOS
dontFixup = true;
})

View File

@ -1,11 +0,0 @@
--- a/intl/relocatable.c 2018-02-28 18:19:46.318224392 +0000
+++ b/intl/relocatable.c 2018-02-28 18:19:37.614224749 +0000
@@ -145,7 +145,7 @@
libcharset_set_relocation_prefix (orig_prefix_arg, curr_prefix_arg);
#endif
#if DEPENDS_ON_LIBICONV && HAVE_ICONV && _LIBICONV_VERSION >= 0x0109
- libiconv_set_relocation_prefix (orig_prefix_arg, curr_prefix_arg);
+ // libiconv_set_relocation_prefix (orig_prefix_arg, curr_prefix_arg);
#endif
#if DEPENDS_ON_LIBINTL && ENABLE_NLS && defined libintl_set_relocation_prefix
libintl_set_relocation_prefix (orig_prefix_arg, curr_prefix_arg);

View File

@ -1,21 +0,0 @@
set MSYS=C:\MSYS64
set TRIPLE=x86_64-w64-mingw32
set PATH=%MSYS%\usr\bin;%MSYS%\mingw64\bin;%PATH%
mkdir build
cd build
set CFLAGS=-I%PREFIX:\=/%/Library/include/
set LDFLAGS=-L%PREFIX:\=/%/Library/lib/
sh ../configure --build=%TRIPLE% ^
--prefix="%PREFIX:\=/%/Library" ^
--target=##TARGET##
if errorlevel 1 exit 1
make -j4
if errorlevel 1 exit 1
make install
if errorlevel 1 exit 1
rem this is a copy of prefixed executables
rmdir /S /Q %PREFIX%\Library\##TARGET##

View File

@ -1,17 +0,0 @@
package:
name: binutils-##TARGET##
version: ##VERSION##
source:
url: ../src.tar.bz2
requirements:
build:
- libiconv
run:
- libiconv
about:
home: https://www.gnu.org/software/binutils/
license: GPL
summary: 'A set of programming tools for creating and managing binary programs, object files, libraries, profile data, and assembly source code.'

View File

@ -1,54 +1,72 @@
{ pkgs, version, src, target }:
let
wfvm = import ../wfvm.nix { inherit pkgs; };
libiconv-filename = "libiconv-1.15-h1df5818_7.tar.bz2";
wfvm = import ../wfvm { inherit pkgs; };
libiconv-filename = "libiconv-1.15-h0c8e037_1006.tar.bz2";
libiconv = pkgs.fetchurl {
url = "https://anaconda.org/anaconda/libiconv/1.15/download/win-64/${libiconv-filename}";
sha256 = "0p431madykrjmi9sbl2sy9kzb0l3vhgs677i8q7cx8g210ab5g52";
};
vc14-filename = "vc-14.1-h0510ff6_4.tar.bz2";
vc14 = pkgs.fetchurl {
url = "https://anaconda.org/anaconda/vc/14.1/download/win-64/${vc14-filename}";
sha256 = "0nsyxph667x8ky1nybakpnk816dkrzbf1684jd7pp6wm5x73p34v";
};
vs2015_runtime-filename = "vs2015_runtime-14.16.27012-hf0eaf9b_2.tar.bz2";
vs2015_runtime = pkgs.fetchurl {
url = "https://anaconda.org/anaconda/vs2015_runtime/14.16.27012/download/win-64/${vs2015_runtime-filename}";
sha256 = "1gbm6i6nkp8linmak5mm42hj1nzqd5ppak8kv1n3wfn52p21ngvs";
url = "https://anaconda.org/conda-forge/libiconv/1.15/download/win-64/${libiconv-filename}";
sha256 = "1jaxnpg5y5pkhvpp9kaq0kpvz7jlj5hynp567q35l7hpfk6xxghh";
};
build = wfvm.utils.wfvm-run {
name = "build-binutils";
image = wfvm.makeWindowsImage { installCommands = with wfvm.layers; [ anaconda3 msys2 (msys2-packages (import ./msys_packages.nix { inherit pkgs; } )) ]; };
image = wfvm.makeWindowsImage { installCommands = with wfvm.layers; [ anaconda3 msys2 msys2-packages ]; };
script = ''
# Create a fake channel to work around another pile of bugs and cretinous design decisions from conda.
${wfvm.utils.win-exec}/bin/win-exec "mkdir fake-channel && mkdir fake-channel\win-64"
ln -s ${libiconv} ${libiconv-filename}
${wfvm.utils.win-put}/bin/win-put ${libiconv-filename} ./fake-channel/win-64
ln -s ${vc14} ${vc14-filename}
${wfvm.utils.win-put}/bin/win-put ${vc14-filename} ./fake-channel/win-64
ln -s ${vs2015_runtime} ${vs2015_runtime-filename}
${wfvm.utils.win-put}/bin/win-put ${vs2015_runtime-filename} ./fake-channel/win-64
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda index fake-channel"
${wfvm.utils.win-put}/bin/win-put ${libiconv} ${libiconv-filename}
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda create -n build ${libiconv-filename}"
cp --no-preserve=mode,ownership -R ${./binutils-recipe} binutils
sed -i s/##TARGET##/${target}/g binutils/*
sed -i s/##VERSION##/${version}/g binutils/*
${wfvm.utils.win-put}/bin/win-put binutils .
tar xjf ${src}
patch -d binutils-${version} -p1 < ${./binutils-hack-libiconv.patch}
tar cjf src.tar.bz2 binutils-${version}
${wfvm.utils.win-put}/bin/win-put src.tar.bz2 .
cat > meta.yaml << EOF
package:
name: binutils-${target}
version: ${version}
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda build --no-anaconda-upload --no-test -c file:///C:/users/wfvm/fake-channel --override-channels binutils"
source:
url: ../src.tar.bz2
${wfvm.utils.win-get}/bin/win-get "Anaconda3/conda-bld/win-64/binutils-${target}-${version}-0.tar.bz2"
requirements:
run:
- libiconv
EOF
cat > bld.bat << EOF
set MSYS=C:\MSYS64
set TOOLPREF=mingw-w64-x86_64-
set TRIPLE=x86_64-pc-mingw64
set PATH=%MSYS%\usr\bin;%MSYS%\mingw64\bin;%PATH%
mkdir build
cd build
set CFLAGS=-I%PREFIX:\=/%/Library/include/
set LDFLAGS=-L%PREFIX:\=/%/Library/lib/
sh ../configure --build=%TRIPLE% ^
--prefix="%PREFIX:\=/%/Library" ^
--target=${target}
if errorlevel 1 exit 1
make -j4
if errorlevel 1 exit 1
make install
if errorlevel 1 exit 1
rem this is a copy of prefixed executables
rmdir /S /Q %PREFIX%\Library\${target}
EOF
${wfvm.utils.win-exec}/bin/win-exec "mkdir binutils"
${wfvm.utils.win-put}/bin/win-put meta.yaml ".\binutils"
${wfvm.utils.win-put}/bin/win-put bld.bat ".\binutils"
${wfvm.utils.win-put}/bin/win-put ${src} ".\src.tar.bz2"
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate build && conda build --no-anaconda-upload --no-test binutils"
${wfvm.utils.win-get}/bin/win-get ".\Anaconda3\conda-bld\win-64\binutils-${target}-${version}-0.tar.bz2"
'';
};
in
pkgs.runCommand "conda-windows-binutils-${target}" { buildInputs = [ build ]; } ''
wfvm-run-build-binutils
mkdir -p $out/win-64 $out/nix-support
cp binutils-*.tar.bz2 $out/win-64
cp *.tar.bz2 $out/win-64
echo file conda $out/win-64/*.tar.bz2 >> $out/nix-support/hydra-build-products
''
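
Both variants of this script feed conda packages from local files rather than from anaconda.org; the fake-channel flavour boils down to the following guest-side steps (paths and the pinned libiconv archive are taken from the script above):

rem Guest-side sketch of the fake-channel workaround used above
mkdir fake-channel && mkdir fake-channel\win-64
rem copy the pinned libiconv package into the channel layout
copy libiconv-1.15-h1df5818_7.tar.bz2 fake-channel\win-64
rem index it so conda accepts it as a channel, then build against it
.\Anaconda3\scripts\activate && conda index fake-channel
.\Anaconda3\scripts\activate && conda build --no-anaconda-upload --no-test -c file:///C:/users/wfvm/fake-channel --override-channels binutils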

View File

@ -1,28 +1,12 @@
{ pkgs, version, src }:
let
wfvm = import ../wfvm.nix { inherit pkgs; };
conda-vs2015_runtime-filename = "vs2015_runtime-14.16.27012-hf0eaf9b_2.tar.bz2";
conda-vs2015_runtime = pkgs.fetchurl {
url = "https://anaconda.org/anaconda/vs2015_runtime/14.16.27012/download/win-64/${conda-vs2015_runtime-filename}";
sha256 = "1gbm6i6nkp8linmak5mm42hj1nzqd5ppak8kv1n3wfn52p21ngvs";
};
conda-cmake-filename = "cmake-3.17.2-h33f27b4_0.tar.bz2";
conda-cmake = pkgs.fetchurl {
url = "https://anaconda.org/anaconda/cmake/3.17.2/download/win-64/${conda-cmake-filename}";
sha256 = "0lg782pj2i9h20rwfkwwskis038r98b3z4c9j1a6ih95rc6m2acn";
};
wfvm = import ../wfvm { inherit pkgs; };
build = wfvm.utils.wfvm-run {
name = "build-llvm-or1k";
image = wfvm.makeWindowsImage { installCommands = with wfvm.layers; [ anaconda3 msvc msvc-ide-unbreak ]; };
image = wfvm.makeWindowsImage { installCommands = with wfvm.layers; [ anaconda3 msys2 msys2-packages ]; };
script = ''
# Create a fake channel so that the conda garbage doesn't complain about not finding the packages it just installed.
ln -s ${conda-vs2015_runtime} ${conda-vs2015_runtime-filename}
ln -s ${conda-cmake} ${conda-cmake-filename}
${wfvm.utils.win-exec}/bin/win-exec "mkdir fake-channel && mkdir fake-channel\win-64"
${wfvm.utils.win-put}/bin/win-put ${conda-vs2015_runtime-filename} ./fake-channel/win-64
${wfvm.utils.win-put}/bin/win-put ${conda-cmake-filename} ./fake-channel/win-64
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda index fake-channel"
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda create -n build --offline"
cat > meta.yaml << EOF
package:
@ -32,19 +16,18 @@ let
source:
url: ../src.tar
requirements:
build:
- cmake
EOF
cat > bld.bat << EOF
set MSYS=C:\MSYS64
set PATH=%MSYS%\usr\bin;%MSYS%\mingw64\bin;%PATH%
set BUILD_TYPE=Release
set CMAKE_GENERATOR=Visual Studio 15 2017 Win64
set CMAKE_GENERATOR=MinGW Makefiles
mkdir build
cd build
cmake .. -G "%CMAKE_GENERATOR%" ^
-Thost=x64 ^
-DCMAKE_BUILD_TYPE="%BUILD_TYPE%" ^
-DCMAKE_INSTALL_PREFIX="%LIBRARY_PREFIX%" ^
-DLLVM_BUILD_LLVM_DYLIB=ON ^
@ -63,21 +46,21 @@ let
EOF
${wfvm.utils.win-exec}/bin/win-exec "mkdir llvm-or1k"
${wfvm.utils.win-put}/bin/win-put meta.yaml llvm-or1k
${wfvm.utils.win-put}/bin/win-put bld.bat llvm-or1k
${wfvm.utils.win-put}/bin/win-put meta.yaml ".\llvm-or1k"
${wfvm.utils.win-put}/bin/win-put bld.bat ".\llvm-or1k"
ln -s ${src} src
tar chf src.tar src
${wfvm.utils.win-put}/bin/win-put src.tar .
${wfvm.utils.win-put}/bin/win-put src.tar ".\src.tar"
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda build --no-anaconda-upload --no-test -c file:///C:/users/wfvm/fake-channel --override-channels llvm-or1k"
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate build && conda build --no-anaconda-upload --no-test llvm-or1k"
${wfvm.utils.win-get}/bin/win-get "Anaconda3/conda-bld/win-64/llvm-or1k-${version}-0.tar.bz2"
${wfvm.utils.win-get}/bin/win-get ".\Anaconda3\conda-bld\win-64\llvm-or1k-${version}-0.tar.bz2"
'';
};
in
pkgs.runCommand "conda-windows-llvm-or1k" { buildInputs = [ build ]; } ''
wfvm-run-build-llvm-or1k
mkdir -p $out/win-64 $out/nix-support
cp llvm-or1k-*.tar.bz2 $out/win-64
cp *.tar.bz2 $out/win-64
echo file conda $out/win-64/*.tar.bz2 >> $out/nix-support/hydra-build-products
''

View File

@ -1,28 +1,15 @@
{ pkgs, conda-windows-llvm-or1k, version, src }:
# See: https://github.com/valtron/llvm-stuff/wiki/Build-llvmlite-with-MSYS2
let
wfvm = import ../wfvm.nix { inherit pkgs; };
conda-vs2015_runtime-filename = "vs2015_runtime-14.16.27012-hf0eaf9b_2.tar.bz2";
conda-vs2015_runtime = pkgs.fetchurl {
url = "https://anaconda.org/anaconda/vs2015_runtime/14.16.27012/download/win-64/${conda-vs2015_runtime-filename}";
sha256 = "1gbm6i6nkp8linmak5mm42hj1nzqd5ppak8kv1n3wfn52p21ngvs";
};
conda-cmake-filename = "cmake-3.17.2-h33f27b4_0.tar.bz2";
conda-cmake = pkgs.fetchurl {
url = "https://anaconda.org/anaconda/cmake/3.17.2/download/win-64/${conda-cmake-filename}";
sha256 = "0lg782pj2i9h20rwfkwwskis038r98b3z4c9j1a6ih95rc6m2acn";
};
wfvm = import ../wfvm { inherit pkgs; };
build = wfvm.utils.wfvm-run {
name = "build-llvmlite-artiq";
image = wfvm.makeWindowsImage { installCommands = with wfvm.layers; [ anaconda3 msvc msvc-ide-unbreak ]; };
image = wfvm.makeWindowsImage { installCommands = with wfvm.layers; [ anaconda3 msys2 msys2-packages ]; };
script = ''
ln -s ${conda-vs2015_runtime} ${conda-vs2015_runtime-filename}
ln -s ${conda-cmake} ${conda-cmake-filename}
${wfvm.utils.win-exec}/bin/win-exec "mkdir fake-channel && mkdir fake-channel\win-64"
${wfvm.utils.win-put}/bin/win-put ${conda-vs2015_runtime-filename} ./fake-channel/win-64
${wfvm.utils.win-put}/bin/win-put ${conda-cmake-filename} ./fake-channel/win-64
${wfvm.utils.win-put}/bin/win-put ${conda-windows-llvm-or1k}/win-64/llvm-or1k-*.tar.bz2 ./fake-channel/win-64
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda index fake-channel"
${wfvm.utils.win-put}/bin/win-put "${conda-windows-llvm-or1k}/win-64/llvm-or1k-*.tar.bz2" ".\llvm-or1k.tar.bz2"
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda create -n build llvm-or1k.tar.bz2"
cat > meta.yaml << EOF
package:
@ -31,46 +18,36 @@ let
source:
url: ../src.tar
requirements:
build:
- cmake
- llvm-or1k
run:
- python<3.9
EOF
cat > bld.bat << EOF
@rem Let CMake know about the LLVM install path, for find_package()
set CMAKE_PREFIX_PATH=%LIBRARY_PREFIX%
@rem Ensure there are no build leftovers (CMake can complain)
if exist ffi\build rmdir /S /Q ffi\build
set MSYS=C:\MSYS64
set PATH=%MSYS%\usr\bin;%MSYS%\mingw64\bin;%PATH%
python setup.py install \
--prefix=%PREFIX% \
--prefix=\$PREFIX \
--single-version-externally-managed \
--record=record.txt \
--no-compile
if errorlevel 1 exit 1
EOF
${wfvm.utils.win-exec}/bin/win-exec "mkdir llvmlite-artiq"
${wfvm.utils.win-put}/bin/win-put meta.yaml llvmlite-artiq
${wfvm.utils.win-put}/bin/win-put bld.bat llvmlite-artiq
ln -s ${src} src
${wfvm.utils.win-put}/bin/win-put meta.yaml ".\llvmlite-artiq"
${wfvm.utils.win-put}/bin/win-put bld.bat ".\llvmlite-artiq"
cp --no-preserve=mode,ownership -R ${src} src
patch -d src -p1 < ${./llvmlite-msys.diff}
tar chf src.tar src
${wfvm.utils.win-put}/bin/win-put src.tar .
${wfvm.utils.win-put}/bin/win-put src.tar ".\src.tar"
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda build --no-anaconda-upload --no-test -c file:///C:/users/wfvm/fake-channel --override-channels llvmlite-artiq"
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate build && conda build --no-anaconda-upload --no-test llvmlite-artiq"
${wfvm.utils.win-get}/bin/win-get "Anaconda3/conda-bld/win-64/llvmlite-artiq-${version}-0.tar.bz2"
${wfvm.utils.win-get}/bin/win-get ".\Anaconda3\conda-bld\win-64\llvmlite-artiq-${version}-0.tar.bz2"
'';
};
in
pkgs.runCommand "conda-windows-llvmlite-artiq" { buildInputs = [ build ]; } ''
wfvm-run-build-llvmlite-artiq
mkdir -p $out/win-64 $out/nix-support
cp llvmlite-artiq-*.tar.bz2 $out/win-64
cp *.tar.bz2 $out/win-64
echo file conda $out/win-64/*.tar.bz2 >> $out/nix-support/hydra-build-products
''

View File

@ -0,0 +1,42 @@
diff --git a/ffi/CMakeLists.txt b/ffi/CMakeLists.txt
index 15470d4..11d06a5 100755
--- a/ffi/CMakeLists.txt
+++ b/ffi/CMakeLists.txt
@@ -29,6 +29,7 @@ list(REMOVE_ITEM LLVM_AVAILABLE_LIBS LTO LLVM)
# that we wish to use
# llvm_map_components_to_libnames(llvm_libs support core irreader)
llvm_map_components_to_libnames(llvm_libs all)
+list(REMOVE_ITEM llvm_libs "LTO")
# Link against LLVM libraries
target_link_libraries(llvmlite ${llvm_libs})
diff --git a/ffi/build.py b/ffi/build.py
index 9169d35..41a9a40 100755
--- a/ffi/build.py
+++ b/ffi/build.py
@@ -24,7 +24,7 @@ def try_cmake(cmake_dir, build_dir, generator):
old_dir = os.getcwd()
try:
os.chdir(build_dir)
- subprocess.check_call(['cmake', '-G', generator, cmake_dir])
+ subprocess.check_call(['cmake', '-G', generator, '-D', 'LLVM_DIR=$LLVM/lib/cmake/llvm', cmake_dir])
finally:
os.chdir(old_dir)
@@ -57,6 +57,7 @@ def find_win32_generator():
if is_64bit:
generator += ' Win64'
build_dir = tempfile.mkdtemp()
+ generator = 'MinGW Makefiles'
print("Trying generator %r" % (generator,))
try:
try_cmake(cmake_dir, build_dir, generator)
@@ -78,7 +79,7 @@ def main_win32():
# Run configuration step
try_cmake(here_dir, build_dir, generator)
subprocess.check_call(['cmake', '--build', build_dir, '--config', config])
- shutil.copy(os.path.join(build_dir, config, 'llvmlite.dll'), target_dir)
+ shutil.copy(os.path.join(build_dir, 'libllvmlite.dll'), os.path.join(target_dir, 'llvmlite.dll'))
def main_posix(kind, library_ext):

View File

@ -1,13 +0,0 @@
{ pkgs, name, filename, baseurl, sha256 }:
let
download = pkgs.fetchurl {
url = "${baseurl}${filename}";
inherit sha256;
};
in
pkgs.runCommand "conda-windows-${name}" { } ''
mkdir -p $out/win-64 $out/nix-support
ln -s ${download} $out/win-64/${filename}
echo file conda $out/win-64/${filename} >> $out/nix-support/hydra-build-products
''
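
A usage sketch for this helper, mirroring how default.nix later in this diff redistributes the prebuilt llvm-or1k conda package (pkgs is assumed to be in scope):

import ./conda-windows/redistribute.nix {
  inherit pkgs;
  name = "llvm-or1k";
  filename = "llvm-or1k-6.0.0-25.tar.bz2";
  baseurl = "https://anaconda.org/m-labs/llvm-or1k/6.0.0/download/win-64/";
  sha256 = "06mnrg79rn9ni0d5z0x3jzb300nhqhbc2h9qbq5m50x3sgm8km63";
}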

View File

@ -0,0 +1,17 @@
[
"python >=3.5.3"
"llvmlite-artiq"
"binutils-or1k-linux >=2.27"
"pythonparser >=1.1"
"scipy"
"numpy"
"prettytable"
"h5py 2.8"
"python-dateutil"
"pyqt >=5.5"
"quamash"
"pyqtgraph 0.10.0"
"pygit2"
"python-levenshtein"
"sipyco"
]

View File

@ -6,20 +6,7 @@ let
name = "artiq";
inherit version;
src = import ../pkgs/artiq-src.nix { fetchgit = pkgs.fetchgit; };
dependencies = [
"pythonparser"
"scipy"
"numpy"
"prettytable"
"h5py"
"python-dateutil"
"pyqt"
(if (pkgs.lib.strings.versionAtLeast version "6.0") then "qasync" else "quamash")
"pyqtgraph"
"pygit2"
"python-levenshtein"
"sipyco"
] ++ (if (pkgs.lib.strings.versionAtLeast version "7.0") then ["llvmlite" "llvm-tools" "lld"] else ["llvmlite-artiq" "binutils-or1k-linux"]);
dependencies = import ./artiq-deps.nix;
extraYaml =
''
about:

View File

@ -6,7 +6,7 @@ let
mkdir -p $out/fake-conda;
# work around yet more idiotic conda behavior - build breaks if write permissions aren't set on source files.
cp --no-preserve=mode,ownership -L -R ${bscan_spi_bitstreams} workaround-conda
cp --no-preserve=mode,ownership -R ${bscan_spi_bitstreams} workaround-conda
pushd workaround-conda
tar cf $out/src.tar .
popd

View File

@ -2,22 +2,12 @@
# recipe must be a string pointing to a path within the source.
{ pkgs }:
{ name ? null
, src
, pname ? null
, version ? null
, recipe ? "fake-conda"
}:
{ name, src, recipe ? "fake-conda"}:
# Check that either name is specified or both pname & version are specified.
assert (name == null) -> pname != null && version != null;
assert (name != null) -> pname == null && version == null;
let
condaBuilderEnv = import ./builder-env.nix { inherit pkgs; };
realName = if (name != null) then name else "${pname}-${version}";
in pkgs.stdenvNoCC.mkDerivation {
name = realName;
inherit src;
in pkgs.stdenv.mkDerivation {
inherit name src;
buildCommand =
''
HOME=`pwd`
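
For reference, the two calling conventions this builder accepts, as used by default.nix later in this diff (fakeSource stands in for a conda/fake-source.nix result and pythonDeps for the usual python-deps.nix import; both are assumptions of this sketch):

# Older style: a single explicit package name.
import ./conda/build.nix { inherit pkgs; } {
  name = "conda-quamash";
  src = fakeSource;
}

# Newer style: pname plus version, from which the name is derived.
import ./conda/build.nix { inherit pkgs; } {
  pname = "conda-sipyco";
  inherit (pythonDeps.sipyco) version;
  src = fakeSource;
}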

View File

@ -3,21 +3,14 @@
with pkgs;
let
condaDeps = [ zlib xorg.libSM xorg.libICE xorg.libX11 xorg.libXau xorg.libXi xorg.libXrender libselinux libGL ];
condaDeps = [ stdenv.cc xorg.libSM xorg.libICE xorg.libX11 xorg.libXau xorg.libXi xorg.libXrender libselinux libGL ];
# Use the full Anaconda distribution, which already contains conda-build and its many dependencies,
# so we don't have to manually deal with them.
condaInstaller = fetchurl {
url = "https://repo.anaconda.com/archive/Anaconda3-2021.05-Linux-x86_64.sh";
sha256 = "0lrxwd3pwz8k3jxwgkd9x47wgkqqy9s8m7hgx1x2gw4gcwysnl97";
url = "https://repo.anaconda.com/archive/Anaconda3-2019.03-Linux-x86_64.sh";
sha256 = "0fmpdd5876ylds98mydmv5klnwlzasa461k0l1f4vhbw96vm3j25";
};
condaSrcChmod = runCommand "conda-src-chmod" { }
''
mkdir $out
cp ${condaInstaller} $out/conda-installer.sh
chmod +x $out/conda-installer.sh
# keep the same file length to avoid breaking embedded payload offsets
sed -i 0,/unset\ LD_LIBRARY_PATH/s//\#nset\ LD_LIBRARY_PATH/ $out/conda-installer.sh
'';
condaSrcChmod = runCommand "conda-src-chmod" { } "mkdir $out; cp ${condaInstaller} $out/conda-installer.sh; chmod +x $out/conda-installer.sh";
condaInstallerEnv = buildFHSUserEnv {
name = "conda-installer-env";
targetPkgs = pkgs: ([ condaSrcChmod ] ++ condaDeps);
@ -33,7 +26,7 @@ let
condaInstalled = runCommand "conda-installed" { }
''
${condaInstallerEnv}/bin/conda-installer-env -c "${condaSrcChmod}/conda-installer.sh -p $out -b"
substituteInPlace $out/lib/python3.8/site-packages/conda/gateways/disk/__init__.py \
substituteInPlace $out/lib/python3.7/site-packages/conda/gateways/disk/__init__.py \
--replace "os.chmod(path, 0o2775)" "pass"
# The conda garbage breaks if the package filename is prefixed with the Nix store hash.
@ -43,15 +36,10 @@ let
ln -s ${libiconv} ${libiconv-filename}
${condaInstallerEnv}/bin/conda-installer-env -c "$out/bin/conda install ${libiconv-filename}"
'';
binutils-fhs = (pkgs.binutils.overrideAttrs(oa: {postFixup = oa.postFixup + "echo /lib64/ld-linux-x86-64.so.2 > $out/nix-support/dynamic-linker";}));
gcc-fhs = (pkgs.gcc.override {bintools = binutils-fhs;} );
in
buildFHSUserEnv {
name = "conda-builder-env";
targetPkgs = pkgs: ([ condaInstalled ] ++ condaDeps ++ [
binutils-fhs
gcc-fhs
# for llvm-or1k
cmake
]

View File

@ -25,7 +25,7 @@ let
# that they are not there if they have been installed from files.
requirements:
run:
- python<3.9
- python
- ncurses [linux]
EOF

View File

@ -1,88 +0,0 @@
{ pkgs, src }:
let
condaBuilderEnv = import ./builder-env.nix { inherit pkgs; };
fake-src = pkgs.runCommand "conda-fake-source-llvmlite" { }
''
mkdir -p $out/fake-conda;
mkdir conda-sucks
pushd conda-sucks
tar xvf ${src} --strip-components=1
tar cf $out/src.tar .
patch -p1 < ${../pkgs/llvmlite-callsite.diff}
patch -p1 < ${../pkgs/llvmlite-abiname.diff}
popd
rm -rf conda-sucks
cat << EOF > $out/fake-conda/meta.yaml
package:
name: llvmlite
version: 0.99 # high version number to entice the conda filth to choose it over others
source:
url: ../src.tar
# Again, we don't specify build dependencies since the conda garbage mistakenly thinks
# that they are not there if they have been installed from files.
requirements:
run:
- libllvm11
- python<3.9
- zlib
EOF
cat << EOF > $out/fake-conda/build.sh
#!/bin/bash
set -e
export LD_LIBRARY_PATH=/lib
python setup.py install \
--prefix=\$PREFIX \
--single-version-externally-managed \
--record=record.txt \
--no-compile
EOF
chmod 755 $out/fake-conda/build.sh
'';
conda-zlib = pkgs.fetchurl {
url = "https://anaconda.org/conda-forge/zlib/1.2.11/download/linux-64/zlib-1.2.11-h36c2ea0_1013.tar.bz2";
sha256 = "sha256-zsSNs1p97wARv9qiuR5eBdKgrXiLiHGiE+uMrP63QYo=";
};
conda-llvm = pkgs.fetchurl {
url = "https://anaconda.org/conda-forge/llvm/11.1.0/download/linux-64/llvm-11.1.0-h32600fe_2.tar.bz2";
sha256 = "sha256-E+jnVeGHad3LH+dKqFKH0/lBuQqZKtybXF44uArmNz8=";
};
conda-llvm-tools = pkgs.fetchurl {
url = "https://anaconda.org/conda-forge/llvm-tools/11.1.0/download/linux-64/llvm-tools-11.1.0-hf817b99_2.tar.bz2";
sha256 = "sha256-Y87krT+d9vdVIPliJVc/szIVBRA3NNcUDdY9Gc9KpXg=";
};
conda-llvmdev = pkgs.fetchurl {
url = "https://anaconda.org/conda-forge/llvmdev/11.1.0/download/linux-64/llvmdev-11.1.0-hf817b99_2.tar.bz2";
sha256 = "sha256-vN87BWggPfpFp51Qm60R3D5krQ4AQwiEJaqPfVb6x40=";
};
in
pkgs.stdenv.mkDerivation {
name = "conda-llvmlite";
src = fake-src;
buildCommand =
''
HOME=`pwd`
mkdir $out
cat << EOF > conda-commands.sh
set -e
conda create --prefix ./conda_tmp ${conda-zlib} ${conda-llvm} ${conda-llvm-tools} ${conda-llvmdev}
conda init
source .bashrc
conda activate ./conda_tmp
conda build --no-anaconda-upload --no-test --output-folder $out $src/fake-conda
EOF
${condaBuilderEnv}/bin/conda-builder-env conda-commands.sh
mkdir -p $out/nix-support
echo file conda $out/*/*.tar.bz2 >> $out/nix-support/hydra-build-products
'';
}

View File

@ -1,10 +1,7 @@
{ pkgs ? import <nixpkgs> { overlays = [ (import ./mozilla-overlay.nix) ]; }}:
{ pkgs ? import <nixpkgs> {}}:
with pkgs;
let
artiq6 = pkgs.lib.strings.versionAtLeast mainPackages.artiq.version "6.0";
artiq7 = pkgs.lib.strings.versionAtLeast mainPackages.artiq.version "7.0";
pythonDeps = import ./pkgs/python-deps.nix { inherit (pkgs) lib fetchgit fetchFromGitHub python3Packages; misoc-new = artiq7; };
rustPlatform = import ./rust-platform.nix { inherit pkgs; };
pythonDeps = import ./pkgs/python-deps.nix { inherit (pkgs) stdenv fetchFromGitHub python3Packages; };
boards = [
{ target = "kasli"; variant = "tester"; }
@ -12,7 +9,7 @@ let
];
boardPackages = pkgs.lib.lists.foldr (board: start:
let
boardBinaries = import ./artiq-board.nix { inherit pkgs rustPlatform; } {
boardBinaries = import ./artiq-board.nix { inherit pkgs; } {
target = board.target;
variant = board.variant;
};
@ -20,41 +17,54 @@ let
start // {
"artiq-board-${board.target}-${board.variant}" = boardBinaries;
}) {} boards;
mainPackages = rec {
inherit (pythonDeps) sipyco asyncserial pythonparser artiq-netboot misoc migen microscope jesd204b migen-axi lit outputcheck qasync;
inherit (pythonDeps) sipyco asyncserial pythonparser pyqtgraph-qt5 misoc migen microscope jesd204b migen-axi lit outputcheck;
binutils-or1k = callPackage ./pkgs/binutils.nix { platform = "or1k"; target = "or1k-linux"; };
binutils-arm = callPackage ./pkgs/binutils.nix { platform = "arm"; target = "armv7-unknown-linux-gnueabihf"; };
llvm-or1k = callPackage ./pkgs/llvm-or1k.nix {};
rustc-legacy = callPackage ./pkgs/rust-legacy/rustc-with-crates.nix
((lib.optionalAttrs (stdenv.cc.isGNU && stdenv.hostPlatform.isi686) {
rustc = callPackage ./pkgs/rust/rustc-with-crates.nix
((stdenv.lib.optionalAttrs (stdenv.cc.isGNU && stdenv.hostPlatform.isi686) {
stdenv = overrideCC stdenv gcc6; # with gcc-7: undefined reference to `__divmoddi4'
}) //
{ inherit llvm-or1k; });
rustc = if artiq7 then rustPlatform.rust.rustc else rustc-legacy;
cargo-legacy = callPackage ./pkgs/rust-legacy/cargo.nix { inherit rustc; rustPlatform = rustPackages_1_45.rustPlatform; };
cargo-vendor-legacy = callPackage ./pkgs/rust-legacy/cargo-vendor.nix {};
cargo = callPackage ./pkgs/rust/cargo.nix { inherit rustc; };
cargo-vendor = callPackage ./pkgs/rust/cargo-vendor.nix {};
llvmlite-artiq = callPackage ./pkgs/llvmlite-artiq.nix { inherit llvm-or1k; };
llvmlite-llvm11 = callPackage ./pkgs/llvmlite-llvm11.nix { };
libartiq-support = callPackage ./pkgs/libartiq-support.nix { inherit rustc; };
artiq = callPackage ./pkgs/artiq.nix { inherit pythonDeps binutils-or1k binutils-arm llvm-or1k llvmlite-artiq llvmlite-llvm11 libartiq-support lit outputcheck; };
artiq = callPackage ./pkgs/artiq.nix { inherit binutils-or1k llvm-or1k llvmlite-artiq libartiq-support lit outputcheck; };
artiq-env = (pkgs.python3.withPackages(ps: [ artiq ])).overrideAttrs (oldAttrs: { name = "${pkgs.python3.name}-artiq-env-${artiq.version}"; });
openocd = callPackage ./pkgs/openocd.nix { };
};
openocd = callPackage ./pkgs/openocd.nix {};
condaNoarch = {
conda-pythonparser = import ./conda/build.nix { inherit pkgs; } {
pname = "conda-pythonparser";
inherit (pythonDeps.pythonparser) version;
name = "conda-pythonparser";
src = import ./conda/fake-source.nix { inherit pkgs; } {
name = "pythonparser";
inherit (pythonDeps.pythonparser) version src;
extraSrcCommands = "patch -p1 < ${./pkgs/python37hack.patch}";
dependencies = ["regex"];
};
};
conda-binutils-or1k = import ./conda/binutils.nix {
inherit pkgs;
inherit (binutils-or1k) version src;
target = "or1k-linux";
};
conda-binutils-arm = import ./conda/binutils.nix {
inherit pkgs;
inherit (binutils-arm) version src;
target = "armv7-unknown-linux-gnueabihf";
};
conda-llvm-or1k = import ./conda/llvm-or1k.nix {
inherit pkgs;
inherit (llvm-or1k) version;
src = llvm-or1k.llvm-src;
};
conda-llvmlite-artiq = import ./conda/llvmlite-artiq.nix {
inherit pkgs conda-llvm-or1k;
inherit (llvmlite-artiq) version src;
};
conda-sipyco = import ./conda/build.nix { inherit pkgs; } {
pname = "conda-sipyco";
inherit (pythonDeps.sipyco) version;
name = "conda-sipyco";
src = import ./conda/fake-source.nix { inherit pkgs; } {
name = "sipyco";
inherit (pythonDeps.sipyco) version src;
@ -62,111 +72,45 @@ let
};
};
conda-quamash = import ./conda/build.nix { inherit pkgs; } {
pname = "conda-quamash";
inherit (pkgs.python3Packages.quamash) version;
name = "conda-quamash";
src = import ./conda/fake-source.nix { inherit pkgs; } {
name = "quamash";
inherit (pkgs.python3Packages.quamash) version src;
};
};
conda-qasync = import ./conda/build.nix { inherit pkgs; } {
pname = "conda-qasync";
inherit (pythonDeps.qasync) version;
src = import ./conda/fake-source.nix { inherit pkgs; } {
name = "qasync";
inherit (pythonDeps.qasync) version src;
};
};
conda-bscan-spi-bitstreams = import ./conda/bscan-spi-bitstreams.nix {
inherit pkgs;
bscan_spi_bitstreams = "${mainPackages.openocd}/share/bscan-spi-bitstreams";
inherit (openocd) bscan_spi_bitstreams;
};
conda-artiq = import ./conda/artiq.nix { inherit pkgs; };
conda-asyncserial = import ./conda/build.nix { inherit pkgs; } {
pname = "conda-asyncserial";
inherit (pythonDeps.asyncserial) version;
name = "conda-asyncserial";
src = import ./conda/fake-source.nix { inherit pkgs; } {
name = "asyncserial";
inherit (pythonDeps.asyncserial) version src;
dependencies = ["pyserial"];
};
};
};
condaLinux = if artiq7 then ({
# ARTIQ-7 uses upstream conda-forge packages except llvmlite
conda-llvmlite = import ./conda/llvmlite-patched.nix {
inherit pkgs;
inherit (mainPackages.llvmlite-llvm11) src;
};
}) else (rec {
conda-binutils-or1k = import ./conda/binutils.nix {
inherit pkgs;
inherit (mainPackages.binutils-or1k) version src;
target = "or1k-linux";
};
conda-binutils-arm = import ./conda/binutils.nix {
inherit pkgs;
inherit (mainPackages.binutils-arm) version src;
target = "armv7-unknown-linux-gnueabihf";
};
conda-llvm-or1k = import ./conda/llvm-or1k.nix {
inherit pkgs;
inherit (mainPackages.llvm-or1k) version;
src = mainPackages.llvm-or1k.llvm-src;
};
conda-llvmlite-artiq = import ./conda/llvmlite-artiq.nix {
inherit pkgs conda-llvm-or1k;
inherit (mainPackages.llvmlite-artiq) version src;
};
});
condaWindows5 = {
conda-windows-binutils-or1k = import ./conda-windows/redistribute.nix {
inherit pkgs;
name = "binutils-or1k";
filename = "binutils-or1k-linux-2.27-h93a10e1_6.tar.bz2";
baseurl = "https://anaconda.org/m-labs/binutils-or1k-linux/2.27/download/win-64/";
sha256 = "0gbks36hfsx3893mihj0bdmg5vwccrq5fw8xp9b9xb8p5pr8qhzx";
};
conda-windows-llvm-or1k = import ./conda-windows/redistribute.nix {
inherit pkgs;
name = "llvm-or1k";
filename = "llvm-or1k-6.0.0-25.tar.bz2";
baseurl = "https://anaconda.org/m-labs/llvm-or1k/6.0.0/download/win-64/";
sha256 = "06mnrg79rn9ni0d5z0x3jzb300nhqhbc2h9qbq5m50x3sgm8km63";
};
conda-windows-llvmlite-artiq = import ./conda-windows/redistribute.nix {
inherit pkgs;
name = "llvmlite-artiq";
filename = "llvmlite-artiq-0.23.0.dev-py35_5.tar.bz2";
baseurl = "https://anaconda.org/m-labs/llvmlite-artiq/0.23.0.dev/download/win-64/";
sha256 = "10w24w5ljvan06pbvwqj4pzal072jnyynmwm42dn06pq88ryz9wj";
};
};
condaWindows6 = rec {
conda-windows-binutils-or1k = import ./conda-windows/binutils.nix {
inherit pkgs;
inherit (mainPackages.binutils-or1k) version src;
inherit (binutils-or1k) version src;
target = "or1k-linux";
};
conda-windows-binutils-arm = import ./conda-windows/binutils.nix {
inherit pkgs;
inherit (mainPackages.binutils-or1k) version src;
inherit (binutils-arm) version src;
target = "armv7-unknown-linux-gnueabihf";
};
conda-windows-llvm-or1k = import ./conda-windows/llvm-or1k.nix {
inherit pkgs;
inherit (mainPackages.llvm-or1k) version;
src = mainPackages.llvm-or1k.llvm-src;
inherit (llvm-or1k) version;
src = llvm-or1k.llvm-src;
};
conda-windows-llvmlite-artiq = import ./conda-windows/llvmlite-artiq.nix {
inherit pkgs conda-windows-llvm-or1k;
inherit (mainPackages.llvmlite-artiq) version src;
inherit (llvmlite-artiq) version src;
};
};
condaWindows = if artiq6 then (if artiq7 then {} else condaWindows6) else condaWindows5;
in
boardPackages // mainPackages // condaNoarch // condaLinux // condaWindows
mainPackages // boardPackages
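
As a usage note, individual jobs from this top-level attribute set can be built locally by attribute name, for example (a sketch; the attribute names follow the boards list and mainPackages above):

# Build the Kasli tester board job produced by the boardPackages fold:
nix-build default.nix -A artiq-board-kasli-tester
# Build the ARTIQ Python package itself:
nix-build default.nix -A artiq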

View File

@ -1,8 +1,7 @@
{ stdenv, lib, cacert, git, cargo, cargo-vendor }:
{ stdenv, cacert, git, cargo, cargo-vendor }:
{ name, src, sha256 }:
stdenv.mkDerivation {
name = "${name}-vendor";
strictDeps = true;
nativeBuildInputs = [ cacert git cargo cargo-vendor ];
inherit src;
@ -31,6 +30,6 @@ stdenv.mkDerivation {
outputHashMode = "recursive";
outputHash = sha256;
impureEnvVars = lib.fetchers.proxyImpureEnvVars;
impureEnvVars = stdenv.lib.fetchers.proxyImpureEnvVars;
preferLocalBuild = true;
}

View File

@ -1,10 +0,0 @@
let
pkgs = import <nixpkgs> {};
overlay = pkgs.fetchFromGitHub {
owner = "mozilla";
repo = "nixpkgs-mozilla";
rev = "0510159186dd2ef46e5464484fbdf119393afa58";
sha256 = "sha256-HJX4Pc5ZUAg4apxB/XHuJ+6ukzvRQqeZMjscOBst2bA=";
};
in
import overlay

View File

@ -1,52 +1,29 @@
{ stdenv, lib, pythonDeps, fetchgit, git, python3Packages, qt5, libartiq-support, lit, outputcheck, fontconfig,
binutils-or1k, binutils-arm, llvm-or1k, llvmlite-artiq,
llvm_11, lld_11, llvmlite-llvm11 }:
{ stdenv, callPackage, fetchgit, git, python3Packages, qt5Full, binutils-or1k, llvm-or1k, llvmlite-artiq, libartiq-support, lit, outputcheck }:
python3Packages.buildPythonPackage rec {
pname = "artiq";
version = import ./artiq-version.nix { inherit stdenv fetchgit git; };
src = import ./artiq-src.nix { inherit fetchgit; };
let
pythonDeps = callPackage ./python-deps.nix {};
in
builtins.trace "artiq" (python3Packages.buildPythonPackage rec {
name = "artiq-${version}";
version = import ./artiq-version.nix { inherit stdenv fetchgit git; };
src = import ./artiq-src.nix { inherit fetchgit; };
preBuild = "export VERSIONEER_OVERRIDE=${version}";
propagatedBuildInputs = [ binutils-or1k llvm-or1k llvmlite-artiq qt5Full ]
++ (with pythonDeps; [ sipyco pyqtgraph-qt5 pythonparser ])
++ (with python3Packages; [ pygit2 numpy dateutil quamash scipy prettytable pyserial python-Levenshtein h5py pyqt5 ]);
checkInputs = [ binutils-or1k outputcheck ];
checkPhase =
''
python -m unittest discover -v artiq.test
preBuild = "export VERSIONEER_OVERRIDE=${version}";
nativeBuildInputs = [ qt5.wrapQtAppsHook ];
propagatedBuildInputs = [ ]
++ (lib.lists.optionals (!lib.strings.versionAtLeast version "7.0") [ binutils-or1k llvm-or1k llvmlite-artiq ])
++ (lib.lists.optionals (lib.strings.versionAtLeast version "7.0") [ llvm_11 lld_11 llvmlite-llvm11 ])
++ (lib.lists.optionals (lib.strings.versionAtLeast version "6.0" && !lib.strings.versionAtLeast version "7.0") [ binutils-arm ])
++ (with pythonDeps; [ sipyco pythonparser ])
++ (with python3Packages; [ pygit2 numpy dateutil scipy prettytable pyserial python-Levenshtein h5py pyqt5 pyqtgraph ])
++ [(if (lib.strings.versionAtLeast version "6.0") then pythonDeps.qasync else python3Packages.quamash)];
dontWrapQtApps = true;
postFixup = ''
wrapQtApp "$out/bin/artiq_dashboard"
wrapQtApp "$out/bin/artiq_browser"
wrapQtApp "$out/bin/artiq_session"
'';
# Modifies PATH to pass the wrapped python environment (i.e. python3.withPackages(...) to subprocesses.
# Allows subprocesses using python to find all packages you have installed
makeWrapperArgs = [
''--run 'if [ ! -z "$NIX_PYTHONPREFIX" ]; then export PATH=$NIX_PYTHONPREFIX/bin:$PATH;fi' ''
"--set FONTCONFIG_FILE ${fontconfig.out}/etc/fonts/fonts.conf"
];
checkInputs = [ lit outputcheck ] ++ (if (lib.strings.versionAtLeast version "7.0") then [ lld_11 llvm_11 ] else [ binutils-or1k ]);
checkPhase =
''
python -m unittest discover -v artiq.test
TESTDIR=`mktemp -d`
cp --no-preserve=mode,ownership -R $src/artiq/test/lit $TESTDIR
LIBARTIQ_SUPPORT=${libartiq-support}/libartiq_support.so lit -v $TESTDIR/lit
'';
meta = with lib; {
description = "A leading-edge control system for quantum information experiments";
homepage = https://m-labs/artiq;
license = licenses.lgpl3;
maintainers = [ maintainers.sb0 ];
};
}
TESTDIR=`mktemp -d`
cp --no-preserve=mode,ownership -R ${src}/artiq/test/lit $TESTDIR
LIBARTIQ_SUPPORT=${libartiq-support}/libartiq_support.so ${lit}/bin/lit -v $TESTDIR/lit
'';
meta = with stdenv.lib; {
description = "A leading-edge control system for quantum information experiments";
homepage = https://m-labs/artiq;
license = licenses.lgpl3;
maintainers = [ maintainers.sb0 ];
};
})

View File

@ -1,4 +1,4 @@
{ stdenv, lib, buildPackages
{ stdenv, buildPackages
, fetchurl, zlib
, platform, target
}:
@ -27,7 +27,7 @@ stdenv.mkDerivation rec {
`gprof', `nm', `strip', etc.
'';
homepage = http://www.gnu.org/software/binutils/;
license = lib.licenses.gpl3Plus;
license = stdenv.lib.licenses.gpl3Plus;
/* Give binutils a lower priority than gcc-wrapper to prevent a
collision due to the ld/as wrappers/symlinks in the latter. */
priority = "10";

View File

@ -1,6 +1,6 @@
{ stdenv, fetchgit, git, rustc }:
stdenv.mkDerivation rec {
pname = "libartiq-support";
name = "libartiq-support-${version}";
version = import ./artiq-version.nix { inherit stdenv fetchgit git; };
src = import ./artiq-src.nix { inherit fetchgit; };
@ -10,6 +10,6 @@ stdenv.mkDerivation rec {
buildPhase =
''
mkdir $out
rustc $src/artiq/test/libartiq_support/lib.rs --out-dir $out -Cpanic=unwind -g
rustc ${src}/artiq/test/libartiq_support/lib.rs --out-dir $out -Cpanic=unwind -g
'';
}

View File

@ -1,14 +1,14 @@
{ stdenv, lib
{ stdenv
, fetchFromGitHub, runCommand
, perl, groff, cmake, libxml2, python, libffi, valgrind
}:
let
llvm-src = fetchFromGitHub {
rev = "7746fe85489e92e1caffda18b9d7b2ae9e5da1a8";
rev = "527aa86b578da5dfb9cf4510b71f0f46a11249f7";
owner = "m-labs";
repo = "llvm-or1k";
sha256 = "0jqbb3k9r91swsyrdak8fzvs1qi451zy8dqmpqriaxk5g83ny5b7";
sha256 = "0lmcg9xj66pf4mb6racipw67vm8kwm84dl861hyqnywd61kvhrwa";
};
clang-src = fetchFromGitHub {
rev = "9e996136d52ed506ed8f57ef8b13b0f0f735e6a3";
@ -25,12 +25,12 @@ let
'';
in
stdenv.mkDerivation rec {
pname = "llvm-or1k";
version = "6.0.0";
name = "llvm-or1k";
passthru.llvm-src = llvm-src;
src = llvm-clang-src;
version = "6.0.0";
buildInputs = [ perl groff cmake libxml2 python libffi ] ++ lib.optional stdenv.isLinux valgrind;
buildInputs = [ perl groff cmake libxml2 python libffi ] ++ stdenv.lib.optional stdenv.isLinux valgrind;
preBuild = ''
NIX_BUILD_CORES=4
@ -59,8 +59,8 @@ in
meta = {
description = "Collection of modular and reusable compiler and toolchain technologies";
homepage = http://llvm.org/;
license = lib.licenses.bsd3;
maintainers = with lib.maintainers; [ sb0 ];
platforms = lib.platforms.all;
license = stdenv.lib.licenses.bsd3;
maintainers = with stdenv.lib.maintainers; [ sb0 ];
platforms = stdenv.lib.platforms.all;
};
}

View File

@ -1,62 +0,0 @@
diff --git a/ffi/targets.cpp b/ffi/targets.cpp
index 98de259fc..1ce472c20 100644
--- a/ffi/targets.cpp
+++ b/ffi/targets.cpp
@@ -182,7 +182,8 @@ LLVMPY_CreateTargetMachine(LLVMTargetRef T,
const char *RelocModel,
const char *CodeModel,
int PrintMC,
- int JIT)
+ int JIT,
+ const char *ABIName)
{
using namespace llvm;
CodeGenOpt::Level cgol;
@@ -233,6 +234,7 @@ LLVMPY_CreateTargetMachine(LLVMTargetRef T,
TargetOptions opt;
opt.PrintMachineCode = PrintMC;
+ opt.MCOptions.ABIName = ABIName;
bool jit = JIT;
diff --git a/llvmlite/binding/targets.py b/llvmlite/binding/targets.py
index eb53f09f2..a7e6ffdc3 100644
--- a/llvmlite/binding/targets.py
+++ b/llvmlite/binding/targets.py
@@ -218,7 +218,7 @@ def __str__(self):
def create_target_machine(self, cpu='', features='',
opt=2, reloc='default', codemodel='jitdefault',
- printmc=False, jit=False):
+ printmc=False, jit=False, abiname=''):
"""
Create a new TargetMachine for this target and the given options.
@@ -230,6 +230,9 @@ def create_target_machine(self, cpu='', features='',
The `jit` option should be set when the target-machine is to be used
in a JIT engine.
+
+ The `abiname` option specifies the ABI. RISC-V targets with hard-float
+ needs to pass the ABI name to LLVM.
"""
assert 0 <= opt <= 3
assert reloc in RELOC
@@ -249,6 +252,7 @@ def create_target_machine(self, cpu='', features='',
_encode_string(codemodel),
int(printmc),
int(jit),
+ _encode_string(abiname),
)
if tm:
return TargetMachine(tm)
@@ -403,6 +407,8 @@ def has_svml():
c_int,
# JIT
c_int,
+ # ABIName
+ c_char_p,
]
ffi.lib.LLVMPY_CreateTargetMachine.restype = ffi.LLVMTargetMachineRef
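
A short Python sketch of what this patch enables (it assumes the patched llvmlite is installed; the triple, CPU and feature strings are illustrative hard-float RISC-V values, per the docstring note above):

import llvmlite.binding as llvm

llvm.initialize()
llvm.initialize_all_targets()
llvm.initialize_all_asmprinters()

target = llvm.Target.from_triple("riscv32-unknown-linux-gnu")
# The added `abiname` keyword is threaded through to LLVM's
# TargetOptions.MCOptions.ABIName, which hard-float RISC-V requires.
tm = target.create_target_machine(cpu="generic-rv32",
                                  features="+m,+a,+f,+d,+c",
                                  abiname="ilp32d")
# tm can now be used with llvm.create_mcjit_compiler or tm.emit_object.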

View File

@ -1,6 +1,6 @@
{ stdenv, lib, fetchFromGitHub, llvm-or1k, makeWrapper, python3, ncurses, zlib, python3Packages }:
{ stdenv, fetchFromGitHub, llvm-or1k, makeWrapper, python3, ncurses, zlib, python3Packages }:
python3Packages.buildPythonPackage rec {
pname = "llvmlite-artiq";
name = "llvmlite-artiq";
version = "0.23.0.dev";
src = fetchFromGitHub {
rev = "158f9d3a898dbf055ca513d69505df288c681fea";
@ -13,7 +13,7 @@ python3Packages.buildPythonPackage rec {
preBuild = "export LLVM_CONFIG=${llvm-or1k}/bin/llvm-config";
meta = with lib; {
meta = with stdenv.lib; {
description = "A lightweight LLVM python binding for writing JIT compilers";
homepage = "http://llvmlite.pydata.org/";
maintainers = with maintainers; [ sb0 ];

View File

@ -1,185 +0,0 @@
diff --git a/llvmlite/ir/builder.py b/llvmlite/ir/builder.py
index f18a8d8bd..b4958770e 100644
--- a/llvmlite/ir/builder.py
+++ b/llvmlite/ir/builder.py
@@ -872,14 +872,14 @@ def resume(self, landingpad):
# Call APIs
def call(self, fn, args, name='', cconv=None, tail=False, fastmath=(),
- attrs=()):
+ attrs=(), arg_attrs=None):
"""
Call function *fn* with *args*:
name = fn(args...)
"""
inst = instructions.CallInstr(self.block, fn, args, name=name,
cconv=cconv, tail=tail, fastmath=fastmath,
- attrs=attrs)
+ attrs=attrs, arg_attrs=arg_attrs)
self._insert(inst)
return inst
@@ -908,9 +908,11 @@ def store_reg(self, value, reg_type, reg_name, name=''):
return self.asm(ftype, "", "{%s}" % reg_name, [value], True, name)
def invoke(self, fn, args, normal_to, unwind_to,
- name='', cconv=None, tail=False):
+ name='', cconv=None, fastmath=(), attrs=(), arg_attrs=None):
inst = instructions.InvokeInstr(self.block, fn, args, normal_to,
- unwind_to, name=name, cconv=cconv)
+ unwind_to, name=name, cconv=cconv,
+ fastmath=fastmath, attrs=attrs,
+ arg_attrs=arg_attrs)
self._set_terminator(inst)
return inst
diff --git a/llvmlite/ir/instructions.py b/llvmlite/ir/instructions.py
index 7e82ee032..f337c1586 100644
--- a/llvmlite/ir/instructions.py
+++ b/llvmlite/ir/instructions.py
@@ -5,7 +5,7 @@
from llvmlite.ir import types
from llvmlite.ir.values import (Block, Function, Value, NamedValue, Constant,
MetaDataArgument, MetaDataString, AttributeSet,
- Undefined)
+ Undefined, ArgumentAttributes)
from llvmlite.ir._utils import _HasMetadata
@@ -63,13 +63,20 @@ class FastMathFlags(AttributeSet):
class CallInstr(Instruction):
def __init__(self, parent, func, args, name='', cconv=None, tail=False,
- fastmath=(), attrs=()):
+ fastmath=(), attrs=(), arg_attrs=None):
self.cconv = (func.calling_convention
if cconv is None and isinstance(func, Function)
else cconv)
self.tail = tail
self.fastmath = FastMathFlags(fastmath)
self.attributes = CallInstrAttributes(attrs)
+ self.arg_attributes = {}
+ if arg_attrs:
+ for idx, attrs in arg_attrs.items():
+ if not (0 <= idx < len(args)):
+ raise ValueError("Invalid argument index {}"
+ .format(idx))
+ self.arg_attributes[idx] = ArgumentAttributes(attrs)
# Fix and validate arguments
args = list(args)
@@ -111,8 +118,13 @@ def called_function(self):
return self.callee
def _descr(self, buf, add_metadata):
- args = ', '.join(['{0} {1}'.format(a.type, a.get_reference())
- for a in self.args])
+ def descr_arg(i, a):
+ if i in self.arg_attributes:
+ attrs = ' '.join(self.arg_attributes[i]._to_list()) + ' '
+ else:
+ attrs = ''
+ return '{0} {1}{2}'.format(a.type, attrs, a.get_reference())
+ args = ', '.join([descr_arg(i, a) for i, a in enumerate(self.args)])
fnty = self.callee.function_type
# Only print function type if variable-argument
@@ -142,10 +154,12 @@ def descr(self, buf):
class InvokeInstr(CallInstr):
def __init__(self, parent, func, args, normal_to, unwind_to, name='',
- cconv=None):
+ cconv=None, fastmath=(), attrs=(), arg_attrs=None):
assert isinstance(normal_to, Block)
assert isinstance(unwind_to, Block)
- super(InvokeInstr, self).__init__(parent, func, args, name, cconv)
+ super(InvokeInstr, self).__init__(parent, func, args, name, cconv,
+ tail=False, fastmath=fastmath,
+ attrs=attrs, arg_attrs=arg_attrs)
self.opname = "invoke"
self.normal_to = normal_to
self.unwind_to = unwind_to
diff --git a/llvmlite/tests/test_ir.py b/llvmlite/tests/test_ir.py
index e97e528ac..ab5864719 100644
--- a/llvmlite/tests/test_ir.py
+++ b/llvmlite/tests/test_ir.py
@@ -1181,6 +1181,39 @@ def test_call_metadata(self):
call void @"llvm.dbg.declare"(metadata i32* %"a", metadata !0, metadata !0)
""") # noqa E501
+ def test_call_attributes(self):
+ block = self.block(name='my_block')
+ builder = ir.IRBuilder(block)
+ fun_ty = ir.FunctionType(
+ ir.VoidType(), (int32.as_pointer(), int32, int32.as_pointer()))
+ fun = ir.Function(builder.function.module, fun_ty, 'fun')
+ fun.args[0].add_attribute('sret')
+ retval = builder.alloca(int32, name='retval')
+ other = builder.alloca(int32, name='other')
+ builder.call(
+ fun,
+ (retval, ir.Constant(int32, 42), other),
+ arg_attrs={
+ 0: ('sret', 'noalias'),
+ 2: 'noalias'
+ }
+ )
+ self.check_block(block, """\
+ my_block:
+ %"retval" = alloca i32
+ %"other" = alloca i32
+ call void @"fun"(i32* noalias sret %"retval", i32 42, i32* noalias %"other")
+ """) # noqa E501
+
+ def test_invalid_call_attributes(self):
+ block = self.block()
+ builder = ir.IRBuilder(block)
+ fun_ty = ir.FunctionType(ir.VoidType(), ())
+ fun = ir.Function(builder.function.module, fun_ty, 'fun')
+ with self.assertRaises(ValueError):
+ # The function has no arguments, so this should fail.
+ builder.call(fun, (), arg_attrs={0: 'sret'})
+
def test_invoke(self):
block = self.block(name='my_block')
builder = ir.IRBuilder(block)
@@ -1196,6 +1229,39 @@ def test_invoke(self):
to label %"normal" unwind label %"unwind"
""")
+ def test_invoke_attributes(self):
+ block = self.block(name='my_block')
+ builder = ir.IRBuilder(block)
+ fun_ty = ir.FunctionType(
+ ir.VoidType(), (int32.as_pointer(), int32, int32.as_pointer()))
+ fun = ir.Function(builder.function.module, fun_ty, 'fun')
+ fun.calling_convention = "fastcc"
+ fun.args[0].add_attribute('sret')
+ retval = builder.alloca(int32, name='retval')
+ other = builder.alloca(int32, name='other')
+ bb_normal = builder.function.append_basic_block(name='normal')
+ bb_unwind = builder.function.append_basic_block(name='unwind')
+ builder.invoke(
+ fun,
+ (retval, ir.Constant(int32, 42), other),
+ bb_normal,
+ bb_unwind,
+ cconv='fastcc',
+ fastmath='fast',
+ attrs='noinline',
+ arg_attrs={
+ 0: ('sret', 'noalias'),
+ 2: 'noalias'
+ }
+ )
+ self.check_block(block, """\
+ my_block:
+ %"retval" = alloca i32
+ %"other" = alloca i32
+ invoke fast fastcc void @"fun"(i32* noalias sret %"retval", i32 42, i32* noalias %"other") noinline
+ to label %"normal" unwind label %"unwind"
+ """) # noqa E501
+
def test_landingpad(self):
block = self.block(name='my_block')
builder = ir.IRBuilder(block)


@ -1,26 +0,0 @@
{ python3Packages, llvm_11 }:
python3Packages.buildPythonPackage rec {
pname = "llvmlite";
version = "0.37.0-artiq";
src = python3Packages.fetchPypi {
inherit pname;
version = "0.37.0";
sha256 = "sha256-Y5K4cM0BjsDGRda7uRjWqg7sqMYmdLqu4whi1raGWxU=";
};
# https://github.com/numba/llvmlite/pull/702
# https://github.com/numba/llvmlite/pull/775
patches = [ ./llvmlite-callsite.diff ./llvmlite-abiname.diff ];
nativeBuildInputs = [ llvm_11 ];
# Disable static linking
# https://github.com/numba/llvmlite/issues/93
postPatch = ''
substituteInPlace ffi/Makefile.linux --replace "-static-libstdc++" ""
substituteInPlace llvmlite/tests/test_binding.py --replace "test_linux" "nope"
'';
# Set directory containing llvm-config binary
preConfigure = ''
export LLVM_CONFIG=${llvm_11.dev}/bin/llvm-config
'';
doCheck = false; # FIXME
}


@ -1,830 +0,0 @@
diff --git a/doc/openocd.texi b/doc/openocd.texi
index 138922d08..ad9f10d2e 100644
--- a/doc/openocd.texi
+++ b/doc/openocd.texi
@@ -5565,6 +5565,10 @@ will not work. These include all @command{*_image} and
functionality is available through the @command{flash write_bank},
@command{flash read_bank}, and @command{flash verify_bank} commands.
+According to device size, 1- to 4-byte addresses are sent. However, some
+flash chips additionally have to be switched to 4-byte addresses by an extra
+command, see below.
+
@itemize
@item @var{ir} ... is loaded into the JTAG IR to map the flash as the JTAG DR.
For the bitstreams generated from @file{xilinx_bscan_spi.py} this is the
@@ -5577,6 +5581,29 @@ set _XILINX_USER1 0x02
flash bank $_FLASHNAME spi 0x0 0 0 0 \
$_TARGETNAME $_XILINX_USER1
@end example
+
+@deffn Command {jtagspi set} bank_id name total_size page_size read_cmd unused pprg_cmd mass_erase_cmd sector_size sector_erase_cmd
+Sets flash parameters: @var{name} human readable string, @var{total_size}
+size in bytes, @var{page_size} is write page size. @var{read_cmd} and @var{pprg_cmd}
+are commands for read and page program, respectively. @var{mass_erase_cmd},
+@var{sector_size} and @var{sector_erase_cmd} are optional.
+@example
+jtagspi set 0 w25q128 0x1000000 0x100 0x03 0 0x02 0xC7 0x10000 0xD8
+@end example
+@end deffn
+
+@deffn Command {jtagspi cmd} bank_id resp_num cmd_byte ...
+Sends command @var{cmd_byte} and at most 20 following bytes and reads
+@var{resp_num} bytes afterwards. E.g. for 'Enter 4-byte address mode'
+@example
+jtagspi cmd 0 0 0xB7
+@end example
+@end deffn
+
+@deffn Command {jtagspi always_4byte} bank_id [ on | off ]
+Some devices use 4-byte addresses for all commands except the legacy 0x03 read
+regardless of device size. This command controls the corresponding hack.
+@end deffn
@end deffn
@deffn {Flash Driver} {xcf}
diff --git a/src/flash/nor/jtagspi.c b/src/flash/nor/jtagspi.c
index dc49fda61..e9a643d12 100644
--- a/src/flash/nor/jtagspi.c
+++ b/src/flash/nor/jtagspi.c
@@ -29,9 +29,12 @@
struct jtagspi_flash_bank {
struct jtag_tap *tap;
- const struct flash_device *dev;
+ struct flash_device dev;
+ char devname[32];
bool probed;
+ bool always_4byte; /* use always 4-byte address except for basic read 0x03 */
uint32_t ir;
+ unsigned int addr_len; /* address length in bytes */
};
FLASH_BANK_COMMAND_HANDLER(jtagspi_flash_bank_command)
@@ -42,10 +45,11 @@ FLASH_BANK_COMMAND_HANDLER(jtagspi_flash_bank_command)
return ERROR_COMMAND_SYNTAX_ERROR;
info = malloc(sizeof(struct jtagspi_flash_bank));
- if (!info) {
+ if (info == NULL) {
LOG_ERROR("no memory for flash bank info");
return ERROR_FAIL;
}
+ bank->sectors = NULL;
bank->driver_priv = info;
info->tap = NULL;
@@ -69,70 +73,59 @@ static void jtagspi_set_ir(struct flash_bank *bank)
jtag_add_ir_scan(info->tap, &field, TAP_IDLE);
}
-static void flip_u8(uint8_t *in, uint8_t *out, int len)
+static void flip_u8(const uint8_t *in, uint8_t *out, unsigned int len)
{
- for (int i = 0; i < len; i++)
+ for (unsigned int i = 0; i < len; i++)
out[i] = flip_u32(in[i], 8);
}
static int jtagspi_cmd(struct flash_bank *bank, uint8_t cmd,
- uint32_t *addr, uint8_t *data, int len)
+ uint8_t *write_buffer, unsigned int write_len, uint8_t *data_buffer, int data_len)
{
- struct jtagspi_flash_bank *info = bank->driver_priv;
+ assert(write_buffer || write_len == 0);
+ assert(data_buffer || data_len == 0);
+
struct scan_field fields[6];
- uint8_t marker = 1;
- uint8_t xfer_bits_buf[4];
- uint8_t addr_buf[3];
- uint8_t *data_buf;
- uint32_t xfer_bits;
- int is_read, lenb, n;
- /* LOG_DEBUG("cmd=0x%02x len=%i", cmd, len); */
+ LOG_DEBUG("cmd=0x%02x write_len=%d data_len=%d", cmd, write_len, data_len);
- is_read = (len < 0);
+ /* negative data_len == read operation */
+ const bool is_read = (data_len < 0);
if (is_read)
- len = -len;
-
- n = 0;
+ data_len = -data_len;
+ int n = 0;
+ const uint8_t marker = 1;
fields[n].num_bits = 1;
fields[n].out_value = &marker;
fields[n].in_value = NULL;
n++;
- xfer_bits = 8 + len - 1;
- /* cmd + read/write - 1 due to the counter implementation */
- if (addr)
- xfer_bits += 24;
- h_u32_to_be(xfer_bits_buf, xfer_bits);
- flip_u8(xfer_bits_buf, xfer_bits_buf, 4);
- fields[n].num_bits = 32;
- fields[n].out_value = xfer_bits_buf;
+ /* transfer length = cmd + address + read/write,
+ * -1 due to the counter implementation */
+ uint8_t xfer_bits[4];
+ h_u32_to_be(xfer_bits, ((sizeof(cmd) + write_len + data_len) * CHAR_BIT) - 1);
+ flip_u8(xfer_bits, xfer_bits, sizeof(xfer_bits));
+ fields[n].num_bits = sizeof(xfer_bits) * CHAR_BIT;
+ fields[n].out_value = xfer_bits;
fields[n].in_value = NULL;
n++;
- cmd = flip_u32(cmd, 8);
- fields[n].num_bits = 8;
+ flip_u8(&cmd, &cmd, sizeof(cmd));
+ fields[n].num_bits = sizeof(cmd) * CHAR_BIT;
fields[n].out_value = &cmd;
fields[n].in_value = NULL;
n++;
- if (addr) {
- h_u24_to_be(addr_buf, *addr);
- flip_u8(addr_buf, addr_buf, 3);
- fields[n].num_bits = 24;
- fields[n].out_value = addr_buf;
+ if (write_len) {
+ flip_u8(write_buffer, write_buffer, write_len);
+ fields[n].num_bits = write_len * CHAR_BIT;
+ fields[n].out_value = write_buffer;
fields[n].in_value = NULL;
n++;
}
- lenb = DIV_ROUND_UP(len, 8);
- data_buf = malloc(lenb);
- if (lenb > 0) {
- if (!data_buf) {
- LOG_ERROR("no memory for spi buffer");
- return ERROR_FAIL;
- }
+ if (data_len > 0) {
if (is_read) {
fields[n].num_bits = jtag_tap_count_enabled();
fields[n].out_value = NULL;
@@ -140,78 +133,313 @@ static int jtagspi_cmd(struct flash_bank *bank, uint8_t cmd,
n++;
fields[n].out_value = NULL;
- fields[n].in_value = data_buf;
+ fields[n].in_value = data_buffer;
} else {
- flip_u8(data, data_buf, lenb);
- fields[n].out_value = data_buf;
+ flip_u8(data_buffer, data_buffer, data_len);
+ fields[n].out_value = data_buffer;
fields[n].in_value = NULL;
}
- fields[n].num_bits = len;
+ fields[n].num_bits = data_len * CHAR_BIT;
n++;
}
jtagspi_set_ir(bank);
/* passing from an IR scan to SHIFT-DR clears BYPASS registers */
+ struct jtagspi_flash_bank *info = bank->driver_priv;
jtag_add_dr_scan(info->tap, n, fields, TAP_IDLE);
int retval = jtag_execute_queue();
if (is_read)
- flip_u8(data_buf, data, lenb);
- free(data_buf);
+ flip_u8(data_buffer, data_buffer, data_len);
return retval;
}
+COMMAND_HANDLER(jtagspi_handle_set)
+{
+ struct flash_bank *bank = NULL;
+ struct jtagspi_flash_bank *info = NULL;
+ struct flash_sector *sectors = NULL;
+ uint32_t temp;
+ unsigned int index = 1;
+ int retval;
+
+ LOG_DEBUG("%s", __func__);
+
+ /* there are 6 mandatory arguments:
+ * devname, size_in_bytes, pagesize, read_cmd, unused, pprog_cmd */
+ if (index + 6 > CMD_ARGC) {
+ command_print(CMD, "jtagspi: not enough arguments");
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+
+ retval = CALL_COMMAND_HANDLER(flash_command_get_bank, 0, &bank);
+ if (ERROR_OK != retval)
+ return retval;
+ info = bank->driver_priv;
+
+ /* invalidate all old info */
+ if (info->probed) {
+ bank->size = 0;
+ bank->num_sectors = 0;
+ if (bank->sectors)
+ free(bank->sectors);
+ bank->sectors = NULL;
+ info->always_4byte = false;
+ info->probed = false;
+ }
+ memset(&info->dev, 0, sizeof(info->dev));
+
+ strncpy(info->devname, CMD_ARGV[index++], sizeof(info->devname) - 1);
+ info->devname[sizeof(info->devname) - 1] = '\0';
+
+ COMMAND_PARSE_NUMBER(u32, CMD_ARGV[index++], temp);
+ info->dev.size_in_bytes = temp;
+ if ((temp & (temp - 1)) || (temp < (1UL << 8))) {
+ command_print(CMD, "jtagspi: device size must be 2^n with n >= 8");
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+
+ COMMAND_PARSE_NUMBER(u32, CMD_ARGV[index++], temp);
+ info->dev.pagesize = temp;
+ if (info->dev.pagesize == 0)
+ info->dev.pagesize = SPIFLASH_DEF_PAGESIZE;
+ if ((temp & (temp - 1)) || (temp > info->dev.size_in_bytes)) {
+ command_print(CMD, "jtagspi: page size must be 2^n and <= device size");
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+
+ COMMAND_PARSE_NUMBER(u8, CMD_ARGV[index++], info->dev.read_cmd);
+ if ((info->dev.read_cmd != 0x03) &&
+ (info->dev.read_cmd != 0x13)) {
+ command_print(CMD, "jtagspi: only 0x03/0x13 READ allowed");
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+
+ COMMAND_PARSE_NUMBER(u8, CMD_ARGV[index++], info->dev.qread_cmd);
+
+ COMMAND_PARSE_NUMBER(u8, CMD_ARGV[index++], info->dev.pprog_cmd);
+ if ((info->dev.pprog_cmd != 0x02) &&
+ (info->dev.pprog_cmd != 0x12)) {
+ command_print(CMD, "jtagspi: only 0x02/0x12 PPRG allowed");
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+
+ /* remaining params are optional */
+ if (index < CMD_ARGC)
+ COMMAND_PARSE_NUMBER(u8, CMD_ARGV[index++], info->dev.chip_erase_cmd);
+ else
+ info->dev.chip_erase_cmd = 0x00;
+
+ if (index < CMD_ARGC) {
+ COMMAND_PARSE_NUMBER(u32, CMD_ARGV[index++], temp);
+ info->dev.sectorsize = temp;
+ if ((info->dev.sectorsize > info->dev.size_in_bytes) ||
+ (info->dev.sectorsize < info->dev.pagesize) || (temp & (temp - 1))) {
+ command_print(CMD, "jtagspi: sector size must be 2^n and <= device size");
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+
+ if (index < CMD_ARGC)
+ COMMAND_PARSE_NUMBER(u8, CMD_ARGV[index++], info->dev.erase_cmd);
+ else {
+ command_print(CMD, "jtagspi: erase command missing");
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+ } else {
+ /* no sector size / sector erase cmd given, treat whole bank as a single sector */
+ info->dev.erase_cmd = 0x00;
+ info->dev.sectorsize = info->dev.size_in_bytes;
+ }
+
+ if (index < CMD_ARGC) {
+ command_print(CMD, "jtagspi: extra arguments");
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+
+ /* set correct size value */
+ bank->size = info->dev.size_in_bytes;
+
+ /* calculate address length in bytes */
+ if (bank->size <= (1UL << 8))
+ info->addr_len = 1;
+ else if (bank->size <= (1UL << 16))
+ info->addr_len = 2;
+ else if (bank->size <= (1UL << 24))
+ info->addr_len = 3;
+ else {
+ info->addr_len = 4;
+ LOG_WARNING("4-byte addresses needed, might need extra command to enable");
+ }
+
+ /* create and fill sectors array */
+ bank->num_sectors =
+ info->dev.size_in_bytes / info->dev.sectorsize;
+ sectors = malloc(sizeof(struct flash_sector) * bank->num_sectors);
+ if (sectors == NULL) {
+ LOG_ERROR("Not enough memory");
+ return ERROR_FAIL;
+ }
+
+ for (unsigned int sector = 0; sector < bank->num_sectors; sector++) {
+ sectors[sector].offset = sector * (info->dev.sectorsize);
+ sectors[sector].size = info->dev.sectorsize;
+ sectors[sector].is_erased = -1;
+ sectors[sector].is_protected = 0;
+ }
+
+ bank->sectors = sectors;
+ info->dev.name = info->devname;
+ if (info->dev.size_in_bytes / 4096)
+ LOG_INFO("flash \'%s\' id = unknown\nflash size = %" PRIu32 " kbytes",
+ info->dev.name, info->dev.size_in_bytes / 1024);
+ else
+ LOG_INFO("flash \'%s\' id = unknown\nflash size = %" PRIu32 " bytes",
+ info->dev.name, info->dev.size_in_bytes);
+ info->probed = true;
+
+ return ERROR_OK;
+}
+
+COMMAND_HANDLER(jtagspi_handle_cmd)
+{
+ struct flash_bank *bank;
+ unsigned int index = 1;
+ const int max = 21;
+ uint8_t num_write, num_read, write_buffer[max], read_buffer[1 << CHAR_BIT];
+ uint8_t data, *ptr;
+ char temp[4], output[(2 + max + (1 << CHAR_BIT)) * 3 + 8];
+ int retval;
+
+ LOG_DEBUG("%s", __func__);
+
+ if (CMD_ARGC < 3) {
+ command_print(CMD, "jtagspi: not enough arguments");
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+
+ num_write = CMD_ARGC - 2;
+ if (num_write > max) {
+ LOG_ERROR("at most %d bytes may be send", max);
+ return ERROR_COMMAND_SYNTAX_ERROR;
+ }
+
+ retval = CALL_COMMAND_HANDLER(flash_command_get_bank, 0, &bank);
+ if (ERROR_OK != retval)
+ return retval;
+
+ COMMAND_PARSE_NUMBER(u8, CMD_ARGV[index++], num_read);
+
+ snprintf(output, sizeof(output), "spi: ");
+ for (ptr = &write_buffer[0] ; index < CMD_ARGC; index++) {
+ COMMAND_PARSE_NUMBER(u8, CMD_ARGV[index], data);
+ *ptr++ = data;
+ snprintf(temp, sizeof(temp), "%02" PRIx8 " ", data);
+ strncat(output, temp, sizeof(output) - strlen(output) - 1);
+ }
+ strncat(output, "-> ", sizeof(output) - strlen(output) - 1);
+
+ /* process command */
+ ptr = &read_buffer[0];
+ retval = jtagspi_cmd(bank, write_buffer[0], &write_buffer[1], num_write - 1, ptr, -num_read);
+ if (retval != ERROR_OK)
+ return retval;
+
+ for ( ; num_read > 0; num_read--) {
+ snprintf(temp, sizeof(temp), "%02" PRIx8 " ", *ptr++);
+ strncat(output, temp, sizeof(output) - strlen(output) - 1);
+ }
+ command_print(CMD, "%s", output);
+
+ return ERROR_OK;
+}
+
+COMMAND_HANDLER(jtagspi_handle_always_4byte)
+{
+ struct flash_bank *bank;
+ struct jtagspi_flash_bank *jtagspi_info;
+ int retval;
+
+ LOG_DEBUG("%s", __func__);
+
+ if ((CMD_ARGC != 1) && (CMD_ARGC != 2))
+ return ERROR_COMMAND_SYNTAX_ERROR;
+
+ retval = CALL_COMMAND_HANDLER(flash_command_get_bank, 0, &bank);
+ if (ERROR_OK != retval)
+ return retval;
+
+ jtagspi_info = bank->driver_priv;
+
+ if (CMD_ARGC == 1)
+ command_print(CMD, jtagspi_info->always_4byte ? "on" : "off");
+ else
+ COMMAND_PARSE_BOOL(CMD_ARGV[1], jtagspi_info->always_4byte, "on", "off");
+
+ return ERROR_OK;
+}
+
static int jtagspi_probe(struct flash_bank *bank)
{
struct jtagspi_flash_bank *info = bank->driver_priv;
struct flash_sector *sectors;
+ const struct flash_device *p;
uint8_t in_buf[3];
uint32_t id, sectorsize;
- if (info->probed)
+ if (bank->sectors) {
free(bank->sectors);
+ bank->sectors = NULL;
+ }
info->probed = false;
- if (!bank->target->tap) {
+ if (bank->target->tap == NULL) {
LOG_ERROR("Target has no JTAG tap");
return ERROR_FAIL;
}
info->tap = bank->target->tap;
- jtagspi_cmd(bank, SPIFLASH_READ_ID, NULL, in_buf, -24);
+ jtagspi_cmd(bank, SPIFLASH_READ_ID, NULL, 0, in_buf, -3);
/* the table in spi.c has the manufacturer byte (first) as the lsb */
id = le_to_h_u24(in_buf);
- info->dev = NULL;
- for (const struct flash_device *p = flash_devices; p->name ; p++)
+ memset(&info->dev, 0, sizeof(info->dev));
+ for (p = flash_devices; p->name ; p++)
if (p->device_id == id) {
- info->dev = p;
+ memcpy(&info->dev, p, sizeof(info->dev));
break;
}
- if (!(info->dev)) {
- LOG_ERROR("Unknown flash device (ID 0x%08" PRIx32 ")", id);
+ if (!(p->name)) {
+ LOG_ERROR("Unknown flash device (ID 0x%06" PRIx32 ")", id & 0xFFFFFF);
return ERROR_FAIL;
}
- LOG_INFO("Found flash device \'%s\' (ID 0x%08" PRIx32 ")",
- info->dev->name, info->dev->device_id);
+ LOG_INFO("Found flash device \'%s\' (ID 0x%06" PRIx32 ")",
+ info->dev.name, info->dev.device_id & 0xFFFFFF);
/* Set correct size value */
- bank->size = info->dev->size_in_bytes;
- if (bank->size <= (1UL << 16))
- LOG_WARNING("device needs 2-byte addresses - not implemented");
- if (bank->size > (1UL << 24))
- LOG_WARNING("device needs paging or 4-byte addresses - not implemented");
+ bank->size = info->dev.size_in_bytes;
+
+ /* calculate address length in bytes */
+ if (bank->size <= (1UL << 8))
+ info->addr_len = 1;
+ else if (bank->size <= (1UL << 16))
+ info->addr_len = 2;
+ else if (bank->size <= (1UL << 24))
+ info->addr_len = 3;
+ else {
+ info->addr_len = 4;
+ LOG_WARNING("4-byte addresses needed, might need extra command to enable");
+ }
/* if no sectors, treat whole bank as single sector */
- sectorsize = info->dev->sectorsize ?
- info->dev->sectorsize : info->dev->size_in_bytes;
+ sectorsize = info->dev.sectorsize ?
+ info->dev.sectorsize : info->dev.size_in_bytes;
/* create and fill sectors array */
- bank->num_sectors = info->dev->size_in_bytes / sectorsize;
+ bank->num_sectors = info->dev.size_in_bytes / sectorsize;
sectors = malloc(sizeof(struct flash_sector) * bank->num_sectors);
- if (!sectors) {
+ if (sectors == NULL) {
LOG_ERROR("not enough memory");
return ERROR_FAIL;
}
@@ -228,27 +456,35 @@ static int jtagspi_probe(struct flash_bank *bank)
return ERROR_OK;
}
+static int jtagspi_auto_probe(struct flash_bank *bank)
+{
+ struct jtagspi_flash_bank *info = bank->driver_priv;
+
+ if (info->probed)
+ return ERROR_OK;
+ return jtagspi_probe(bank);
+}
+
static int jtagspi_read_status(struct flash_bank *bank, uint32_t *status)
{
uint8_t buf;
- int err = jtagspi_cmd(bank, SPIFLASH_READ_STATUS, NULL, &buf, -8);
+ int err = jtagspi_cmd(bank, SPIFLASH_READ_STATUS, NULL, 0, &buf, -1);
if (err == ERROR_OK) {
*status = buf;
- /* LOG_DEBUG("status=0x%08" PRIx32, *status); */
+ LOG_DEBUG("status=0x%02" PRIx8, *status);
}
-
return err;
}
static int jtagspi_wait(struct flash_bank *bank, int timeout_ms)
{
- uint32_t status;
int64_t t0 = timeval_ms();
int64_t dt;
do {
dt = timeval_ms() - t0;
+ uint32_t status = (uint32_t)-1;
int retval = jtagspi_read_status(bank, &status);
if (retval != ERROR_OK)
return retval;
@@ -266,16 +502,15 @@ static int jtagspi_wait(struct flash_bank *bank, int timeout_ms)
static int jtagspi_write_enable(struct flash_bank *bank)
{
- uint32_t status;
-
- jtagspi_cmd(bank, SPIFLASH_WRITE_ENABLE, NULL, NULL, 0);
+ jtagspi_cmd(bank, SPIFLASH_WRITE_ENABLE, NULL, 0, NULL, 0);
+ uint32_t status = (uint32_t)-1;
int retval = jtagspi_read_status(bank, &status);
if (retval != ERROR_OK)
return retval;
if ((status & SPIFLASH_WE_BIT) == 0) {
- LOG_ERROR("Cannot enable write to flash. Status=0x%08" PRIx32, status);
+ LOG_ERROR("Cannot enable write to flash. Status=0x%02" PRIx8, status);
return ERROR_FAIL;
}
return ERROR_OK;
@@ -287,28 +522,51 @@ static int jtagspi_bulk_erase(struct flash_bank *bank)
int retval;
int64_t t0 = timeval_ms();
- if (info->dev->chip_erase_cmd == 0x00)
+ if (info->dev.chip_erase_cmd == 0x00)
return ERROR_FLASH_OPER_UNSUPPORTED;
retval = jtagspi_write_enable(bank);
if (retval != ERROR_OK)
return retval;
- jtagspi_cmd(bank, info->dev->chip_erase_cmd, NULL, NULL, 0);
- retval = jtagspi_wait(bank, bank->num_sectors*JTAGSPI_MAX_TIMEOUT);
+
+ retval = jtagspi_cmd(bank, info->dev.chip_erase_cmd, NULL, 0, NULL, 0);
+ if (retval != ERROR_OK)
+ return retval;
+
+ retval = jtagspi_wait(bank, bank->num_sectors * JTAGSPI_MAX_TIMEOUT);
LOG_INFO("took %" PRId64 " ms", timeval_ms() - t0);
return retval;
}
+static uint8_t *fill_addr(uint32_t addr, unsigned int addr_len, uint8_t *buffer)
+{
+ for (buffer += addr_len; addr_len > 0; --addr_len) {
+ *--buffer = addr;
+ addr >>= 8;
+ }
+
+ return buffer;
+}
+
static int jtagspi_sector_erase(struct flash_bank *bank, unsigned int sector)
{
struct jtagspi_flash_bank *info = bank->driver_priv;
int retval;
+ uint8_t addr[sizeof(uint32_t)];
int64_t t0 = timeval_ms();
retval = jtagspi_write_enable(bank);
if (retval != ERROR_OK)
return retval;
- jtagspi_cmd(bank, info->dev->erase_cmd, &bank->sectors[sector].offset, NULL, 0);
+
+ /* ATXP032/064/128 use always 4-byte addresses except for 0x03 read */
+ unsigned int addr_len = info->always_4byte ? 4 : info->addr_len;
+
+ retval = jtagspi_cmd(bank, info->dev.erase_cmd, fill_addr(bank->sectors[sector].offset, addr_len, addr),
+ addr_len, NULL, 0);
+ if (retval != ERROR_OK)
+ return retval;
+
retval = jtagspi_wait(bank, JTAGSPI_MAX_TIMEOUT);
LOG_INFO("sector %u took %" PRId64 " ms", sector, timeval_ms() - t0);
return retval;
@@ -339,8 +597,9 @@ static int jtagspi_erase(struct flash_bank *bank, unsigned int first,
}
}
- if (first == 0 && last == (bank->num_sectors - 1)
- && info->dev->chip_erase_cmd != info->dev->erase_cmd) {
+ if (first == 0 && last == (bank->num_sectors - 1) &&
+ info->dev.chip_erase_cmd != 0x00 &&
+ info->dev.chip_erase_cmd != info->dev.erase_cmd) {
LOG_DEBUG("Trying bulk erase.");
retval = jtagspi_bulk_erase(bank);
if (retval == ERROR_OK)
@@ -349,7 +608,7 @@ static int jtagspi_erase(struct flash_bank *bank, unsigned int first,
LOG_WARNING("Bulk flash erase failed. Falling back to sector erase.");
}
- if (info->dev->erase_cmd == 0x00)
+ if (info->dev.erase_cmd == 0x00)
return ERROR_FLASH_OPER_UNSUPPORTED;
for (unsigned int sector = first; sector <= last; sector++) {
@@ -374,49 +633,93 @@ static int jtagspi_protect(struct flash_bank *bank, int set, unsigned int first,
static int jtagspi_read(struct flash_bank *bank, uint8_t *buffer, uint32_t offset, uint32_t count)
{
struct jtagspi_flash_bank *info = bank->driver_priv;
+ uint32_t pagesize, currsize;
+ uint8_t addr[sizeof(uint32_t)];
+ int retval;
if (!(info->probed)) {
- LOG_ERROR("Flash bank not yet probed.");
+ LOG_ERROR("Flash bank not probed.");
return ERROR_FLASH_BANK_NOT_PROBED;
}
- jtagspi_cmd(bank, SPIFLASH_READ, &offset, buffer, -count*8);
+ /* if no sectorsize, use reasonable default */
+ pagesize = info->dev.sectorsize ? info->dev.sectorsize : info->dev.pagesize;
+ if (pagesize == 0)
+ pagesize = (info->dev.size_in_bytes <= SPIFLASH_DEF_PAGESIZE) ?
+ info->dev.size_in_bytes : SPIFLASH_DEF_PAGESIZE;
+
+ /* ATXP032/064/128 use always 4-byte addresses except for 0x03 read */
+ unsigned int addr_len = ((info->dev.read_cmd != 0x03) && info->always_4byte) ? 4 : info->addr_len;
+
+ while (count > 0) {
+ /* length up to end of current page */
+ currsize = ((offset + pagesize) & ~(pagesize - 1)) - offset;
+ /* but no more than remaining size */
+ currsize = (count < currsize) ? count : currsize;
+
+ retval = jtagspi_cmd(bank, info->dev.read_cmd, fill_addr(offset, addr_len, addr),
+ addr_len, buffer, -currsize);
+ if (retval != ERROR_OK) {
+ LOG_ERROR("page read error");
+ return retval;
+ }
+ LOG_DEBUG("read page at 0x%08" PRIx32, offset);
+ offset += currsize;
+ buffer += currsize;
+ count -= currsize;
+ }
return ERROR_OK;
}
static int jtagspi_page_write(struct flash_bank *bank, const uint8_t *buffer, uint32_t offset, uint32_t count)
{
+ struct jtagspi_flash_bank *info = bank->driver_priv;
+ uint8_t addr[sizeof(uint32_t)];
int retval;
retval = jtagspi_write_enable(bank);
if (retval != ERROR_OK)
return retval;
- jtagspi_cmd(bank, SPIFLASH_PAGE_PROGRAM, &offset, (uint8_t *) buffer, count*8);
+
+ /* ATXP032/064/128 use always 4-byte addresses except for 0x03 read */
+ unsigned int addr_len = ((info->dev.read_cmd != 0x03) && info->always_4byte) ? 4 : info->addr_len;
+
+ retval = jtagspi_cmd(bank, info->dev.pprog_cmd, fill_addr(offset, addr_len, addr),
+ addr_len, (uint8_t *) buffer, count);
+ if (retval != ERROR_OK)
+ return retval;
return jtagspi_wait(bank, JTAGSPI_MAX_TIMEOUT);
}
static int jtagspi_write(struct flash_bank *bank, const uint8_t *buffer, uint32_t offset, uint32_t count)
{
struct jtagspi_flash_bank *info = bank->driver_priv;
+ uint32_t pagesize, currsize;
int retval;
- uint32_t n, pagesize;
if (!(info->probed)) {
- LOG_ERROR("Flash bank not yet probed.");
+ LOG_ERROR("Flash bank not probed.");
return ERROR_FLASH_BANK_NOT_PROBED;
}
/* if no write pagesize, use reasonable default */
- pagesize = info->dev->pagesize ? info->dev->pagesize : SPIFLASH_DEF_PAGESIZE;
+ pagesize = info->dev.pagesize ? info->dev.pagesize : SPIFLASH_DEF_PAGESIZE;
+
+ while (count > 0) {
+ /* length up to end of current page */
+ currsize = ((offset + pagesize) & ~(pagesize - 1)) - offset;
+ /* but no more than remaining size */
+ currsize = (count < currsize) ? count : currsize;
- for (n = 0; n < count; n += pagesize) {
- retval = jtagspi_page_write(bank, buffer + n, offset + n,
- MIN(count - n, pagesize));
+ retval = jtagspi_page_write(bank, buffer, offset, currsize);
if (retval != ERROR_OK) {
LOG_ERROR("page write error");
return retval;
}
- LOG_DEBUG("wrote page at 0x%08" PRIx32, offset + n);
+ LOG_DEBUG("wrote page at 0x%08" PRIx32, offset);
+ offset += currsize;
+ buffer += currsize;
+ count -= currsize;
}
return ERROR_OK;
}
@@ -430,22 +733,72 @@ static int jtagspi_info(struct flash_bank *bank, struct command_invocation *cmd)
return ERROR_OK;
}
- command_print_sameline(cmd, "\nSPIFI flash information:\n"
- " Device \'%s\' (ID 0x%08" PRIx32 ")\n",
- info->dev->name, info->dev->device_id);
+ command_print_sameline(cmd, "flash \'%s\', device id = 0x%06" PRIx32
+ ", flash size = %" PRIu32 " %sbytes\n(page size = %" PRIu32
+ ", read = 0x%02" PRIx8 ", qread = 0x%02" PRIx8
+ ", pprog = 0x%02" PRIx8 ", mass_erase = 0x%02" PRIx8
+ ", sector size = %" PRIu32 " %sbytes, sector_erase = 0x%02" PRIx8 ")",
+ info->dev.name, info->dev.device_id & 0xFFFFFF,
+ bank->size / 4096 ? bank->size / 1024 : bank->size,
+ bank->size / 4096 ? "k" : "", info->dev.pagesize,
+ info->dev.read_cmd, info->dev.qread_cmd,
+ info->dev.pprog_cmd, info->dev.chip_erase_cmd,
+ info->dev.sectorsize / 4096 ?
+ info->dev.sectorsize / 1024 : info->dev.sectorsize,
+ info->dev.sectorsize / 4096 ? "k" : "",
+ info->dev.erase_cmd);
return ERROR_OK;
}
+static const struct command_registration jtagspi_exec_command_handlers[] = {
+ {
+ .name = "set",
+ .handler = jtagspi_handle_set,
+ .mode = COMMAND_EXEC,
+ .usage = "bank_id name chip_size page_size read_cmd unused pprg_cmd "
+ "[ mass_erase_cmd ] [ sector_size sector_erase_cmd ]",
+ .help = "Set device parameters if not autodetected.",
+ },
+ {
+ .name = "cmd",
+ .handler = jtagspi_handle_cmd,
+ .mode = COMMAND_EXEC,
+ .usage = "bank_id num_resp cmd_byte ...",
+ .help = "Send low-level command cmd_byte and following bytes, read num_bytes.",
+ },
+ {
+ .name = "always_4byte",
+ .handler = jtagspi_handle_always_4byte,
+ .mode = COMMAND_EXEC,
+ .usage = "bank_id [ on | off ]",
+ .help = "Use always 4-byte address except for basic 0x03.",
+ },
+
+ COMMAND_REGISTRATION_DONE
+};
+
+static const struct command_registration jtagspi_command_handlers[] = {
+ {
+ .name = "jtagspi",
+ .mode = COMMAND_ANY,
+ .help = "jtagspi command group",
+ .usage = "",
+ .chain = jtagspi_exec_command_handlers,
+ },
+ COMMAND_REGISTRATION_DONE
+};
+
const struct flash_driver jtagspi_flash = {
.name = "jtagspi",
+ .commands = jtagspi_command_handlers,
.flash_bank_command = jtagspi_flash_bank_command,
.erase = jtagspi_erase,
.protect = jtagspi_protect,
.write = jtagspi_write,
.read = jtagspi_read,
.probe = jtagspi_probe,
- .auto_probe = jtagspi_probe,
+ .auto_probe = jtagspi_auto_probe,
.erase_check = default_flash_blank_check,
.info = jtagspi_info,
.free_driver_priv = default_flash_free_driver_priv,


@ -1,35 +1,74 @@
{ stdenv, buildEnv, lib, fetchFromGitHub, autoreconfHook269, openocd }:
let
bscan_spi_bitstreams-pkg = stdenv.mkDerivation {
name = "bscan_spi_bitstreams";
src = fetchFromGitHub {
{ stdenv, fetchFromGitHub, autoreconfHook, libftdi, libusb1, pkgconfig, hidapi }:
stdenv.mkDerivation rec {
name = "openocd-mlabs-${version}";
version = "0.10.0";
src = fetchFromGitHub {
owner = "m-labs";
repo = "openocd";
fetchSubmodules = true;
rev = "c383a57adcff332b2c5cf8d55a84626285b42c2c";
sha256 = "0xlj9cs72acx3zqagvr7f1c0v6lnqhl8fgrlhgmhmvk5n9knk492";
};
bscan_spi_bitstreams = fetchFromGitHub {
owner = "quartiq";
repo = "bscan_spi_bitstreams";
rev = "01d8f819f15baf9a8cc5d96945a51e4d267ff564";
sha256 = "1zqv47kzgvbn4c8cr019a6wcja7gn5h1z4kvw5bhpc72fyhagal9";
};
phases = ["installPhase"];
installPhase =
''
mkdir -p $out/share/bscan-spi-bitstreams
cp $src/*.bit $out/share/bscan-spi-bitstreams
'';
};
openocd-fixed = openocd.overrideAttrs(oa: {
version = "unstable-2021-09-15";
src = fetchFromGitHub {
owner = "openocd-org";
repo = "openocd";
rev = "a0bd3c9924870c3b8f428648410181040dabc33c";
sha256 = "sha256-YgUsl4/FohfsOncM4uiz/3c6g2ZN4oZ0y5vV/2Skwqg=";
fetchSubmodules = true;
};
# https://review.openocd.org/c/openocd/+/4876
patches = oa.patches or [] ++ [ ./openocd-jtagspi.diff ];
nativeBuildInputs = oa.nativeBuildInputs or [] ++ [ autoreconfHook269 ];
});
in
buildEnv {
name = "openocd-bscanspi";
paths = [ openocd-fixed bscan_spi_bitstreams-pkg ];
}
nativeBuildInputs = [ pkgconfig ];
buildInputs = [ autoreconfHook libftdi libusb1 hidapi ];
configureFlags = [
"--enable-jtag_vpi"
"--enable-usb_blaster_libftdi"
"--enable-amtjtagaccel"
"--enable-gw16012"
"--enable-presto_libftdi"
"--enable-openjtag_ftdi"
"--enable-oocd_trace"
"--enable-buspirate"
"--enable-sysfsgpio"
"--enable-remote-bitbang"
"--disable-werror"
];
NIX_CFLAGS_COMPILE = [
"-Wno-implicit-fallthrough"
"-Wno-format-truncation"
"-Wno-format-overflow"
"-Wno-error=tautological-compare"
];
postInstall = ''
mkdir -p "$out/etc/udev/rules.d"
rules="$out/share/openocd/contrib/60-openocd.rules"
if [ ! -f "$rules" ]; then
echo "$rules is missing, must update the Nix file."
exit 1
fi
ln -s "$rules" "$out/etc/udev/rules.d/"
mkdir -p "$out/share/bscan-spi-bitstreams"
cp ${bscan_spi_bitstreams}/*.bit "$out/share/bscan-spi-bitstreams"
'';
meta = with stdenv.lib; {
description = "Free and Open On-Chip Debugging, In-System Programming and Boundary-Scan Testing";
longDescription = ''
OpenOCD provides on-chip programming and debugging support with a layered
architecture of JTAG interface and TAP support, debug target support
(e.g. ARM, MIPS), and flash chip drivers (e.g. CFI, NAND, etc.). Several
network interfaces are available for interacting with OpenOCD: HTTP,
telnet, TCL, and GDB. The GDB server enables OpenOCD to function as a
"remote target" for source-level debugging of embedded systems using the
GNU GDB program.
'';
homepage = http://openocd.sourceforge.net/;
license = licenses.gpl2Plus;
maintainers = with maintainers; [ sb0 ];
platforms = platforms.linux;
};
}


@ -1,21 +1,21 @@
{ lib, fetchgit, fetchFromGitHub, python3Packages, misoc-new }:
{ stdenv, fetchFromGitHub, python3Packages }:
rec {
# User dependencies
sipyco = python3Packages.buildPythonPackage rec {
pname = "sipyco";
version = "1.2";
name = "sipyco";
version = "1.1";
src = fetchFromGitHub {
owner = "m-labs";
repo = "sipyco";
rev = "v${version}";
sha256 = "02x2s66x9bbzj82d823vjg2i73w7iqwvkrjbbyrsav6ccj7f90sj";
sha256 = "09vyrzfhnbp65ybd7w2g96gvvnhzafpn72syls2kbg2paqjjf9gs";
};
propagatedBuildInputs = with python3Packages; [ numpy ];
};
asyncserial = python3Packages.buildPythonPackage rec {
pname = "asyncserial";
name = "asyncserial";
version = "0.1";
src = fetchFromGitHub {
owner = "m-labs";
@ -27,74 +27,49 @@ rec {
};
pythonparser = python3Packages.buildPythonPackage rec {
pname = "pythonparser";
version = "1.4";
name = "pythonparser";
version = "1.3";
src = fetchFromGitHub {
owner = "m-labs";
repo = "pythonparser";
rev = "5413ee5c9f8760e95c6acd5d6e88dabb831ad201";
sha256 = "1b9p3pjnfaiaf2k0a3iq39aj2vymfxx139hqdqkd3q4awrwy1957";
rev = "5b391fe86f43bb9f4f96c5bc0532e2a112db2936";
sha256 = "1gw1fk4y2l6bwq0fg2a9dfc1rvq8cv492dyil96amjdhsxvnx35b";
};
patches = [ ./python37hack.patch ];
propagatedBuildInputs = with python3Packages; [ regex ];
};
qasync = python3Packages.buildPythonPackage rec {
pname = "qasync";
pyqtgraph-qt5 = python3Packages.buildPythonPackage rec {
name = "pyqtgraph_qt5-${version}";
version = "0.10.0";
doCheck = false;
src = fetchFromGitHub {
owner = "CabbageDevelopment";
repo = "qasync";
rev = "v${version}";
sha256 = "1zga8s6dr7gk6awmxkh4pf25gbg8n6dv1j4b0by7y0fhi949qakq";
owner = "pyqtgraph";
repo = "pyqtgraph";
rev = "1426e334e1d20542400d77c72c132b04c6d17ddb";
sha256 = "1079haxyr316jf0wpirxdj0ry6j8mr16cqr0dyyrd5cnxwl7zssh";
};
propagatedBuildInputs = [ python3Packages.pyqt5 ];
checkInputs = [ python3Packages.pytest ];
checkPhase = ''
pytest -k 'test_qthreadexec.py' # the others cause the test execution to be aborted, I think because of asyncio
'';
propagatedBuildInputs = with python3Packages; [ scipy numpy pyqt5 pyopengl ];
};
# Development/firmware dependencies
artiq-netboot = python3Packages.buildPythonPackage rec {
pname = "artiq-netboot";
version = "unstable-2020-10-15";
src = fetchgit {
url = "https://git.m-labs.hk/m-labs/artiq-netboot.git";
rev = "04f69eb07df73abe4b89fde2c24084f7664f2104";
sha256 = "0ql4fr8m8gpb2yql8aqsdqsssxb8zqd6l65kl1f6s9845zy7shs9";
misoc = python3Packages.buildPythonPackage rec {
name = "misoc";
src = fetchFromGitHub {
owner = "m-labs";
repo = "misoc";
rev = "7e5fe8d38835175202dad2c51d37b20b76fd9e16";
sha256 = "0i8bppz7x2s45lx9n49c0r87pqps09z35yzc17amvx21qsplahxn";
fetchSubmodules = true;
};
};
misoc = python3Packages.buildPythonPackage {
pname = "misoc";
version = if misoc-new then "unstable-2021-10-10" else "unstable-2021-02-15";
src = if misoc-new
then (fetchFromGitHub {
owner = "m-labs";
repo = "misoc";
rev = "f5203e406520874e15ab5d070058ef642fc57fd9";
sha256 = "sha256-/2XTejqj0Bo81HaTrlTSWwInnWwsuqnq+CURXbpIrkA=";
fetchSubmodules = true;
})
else (fetchFromGitHub {
owner = "m-labs";
repo = "misoc";
rev = "d84551418042cea0891ea743442e20684b51e77a";
sha256 = "1id5qjr9dl4r3vi6jdn7dfpnr2wb08nrm3nfscn18clbbdxybyjn";
fetchSubmodules = true;
});
# TODO: fix misoc bitrot and re-enable tests
doCheck = false;
propagatedBuildInputs = with python3Packages; [ pyserial jinja2 numpy asyncserial migen ];
meta = with lib; {
meta = with stdenv.lib; {
description = "A high performance and small footprint system-on-chip based on Migen";
homepage = "https://m-labs.hk/migen";
license = licenses.bsd2;
@ -103,19 +78,18 @@ rec {
};
migen = python3Packages.buildPythonPackage rec {
pname = "migen";
version = "unstable-2021-12-16";
name = "migen";
src = fetchFromGitHub {
owner = "m-labs";
repo = "migen";
rev = "ac703010eaa06ac9b6e32f97c6fa98b15de22b31";
sha256 = "sha256-qpVj/yJf4hDDc99XXpVPH4EbLC8aCmEtACn5qNc3DGI=";
rev = "b1b2b298b85a795239daad84c75be073ddc4f8bd";
sha256 = "1qy2ydk8xqqv92i992j1g71fbi185zd6s3kigzsf3169874dyh81";
};
propagatedBuildInputs = with python3Packages; [ colorama ];
meta = with lib; {
meta = with stdenv.lib; {
description = "A Python toolbox for building complex digital hardware";
homepage = "https://m-labs.hk/migen";
license = licenses.bsd2;
@ -124,8 +98,7 @@ rec {
};
microscope = python3Packages.buildPythonPackage rec {
pname = "microscope";
version = "unstable-2019-05-17";
name = "microscope";
src = fetchFromGitHub {
owner = "m-labs";
@ -136,7 +109,7 @@ rec {
propagatedBuildInputs = with python3Packages; [ pyserial prettytable msgpack migen ];
meta = with lib; {
meta = with stdenv.lib; {
description = "Finding the bacteria in rotting FPGA designs";
homepage = "https://m-labs.hk/migen";
license = licenses.bsd2;
@ -145,19 +118,18 @@ rec {
};
jesd204b = python3Packages.buildPythonPackage rec {
pname = "jesd204b";
version = "unstable-2021-05-05";
name = "jesd204b";
src = fetchFromGitHub {
owner = "m-labs";
repo = "jesd204b";
rev = "bf1cd9014c8b7a9db67609f653634daaf3bcd39b";
sha256 = "035csm6as4p75cjz7kd6gnras14856i2jzi9g1gd800g284hw9n3";
rev = "ac877ac5975411a438415f824e182338ed773529";
sha256 = "1lkb7cyj87bq4y0hp6379jq4q4lm2ijldccpyhawiizcfkawxa10";
};
propagatedBuildInputs = with python3Packages; [ migen misoc ];
meta = with lib; {
meta = with stdenv.lib; {
description = "JESD204B core for Migen/MiSoC";
homepage = "https://m-labs.hk/migen";
license = licenses.bsd2;
@ -174,7 +146,7 @@ rec {
sha256 = "0j15i54p7nri6hkzn1wal9pxri4pgql01wgjccig6ar0v5jjbvsy";
};
meta = with lib; {
meta = with stdenv.lib; {
description = "Super-fast and clean conversions to numbers";
homepage = "https://github.com/SethMMorton/fastnumbers";
license = licenses.mit;
@ -183,8 +155,7 @@ rec {
};
ramda = python3Packages.buildPythonPackage {
pname = "ramda";
version = "unstable-2019-02-01";
name = "ramda";
src = fetchFromGitHub {
owner = "peteut";
@ -203,7 +174,7 @@ rec {
export PBR_VERSION=0.0.1
'';
meta = with lib; {
meta = with stdenv.lib; {
description = "Ramda, ported to Python";
homepage = "https://github.com/peteut/ramda.py";
license = licenses.mit;
@ -212,14 +183,13 @@ rec {
};
migen-axi = python3Packages.buildPythonPackage {
pname = "migen-axi";
version = "unstable-2021-09-15";
name = "migen-axi";
src = fetchFromGitHub {
owner = "peteut";
repo = "migen-axi";
rev = "9763505ee96acd7572280a2d1233721342dc7c3f";
sha256 = "15c7g05n183rka66fl1glzp6h7xjlpy1p6k8biry24dangsmxmvg";
rev = "c4002f7db62cb9c4599336a9413006ee1d138fbd";
sha256 = "0p2ndznch7z4sbp4m8hq49rkg7p4vcrlbbfk6l8644wyl1kk0fvg";
};
nativeBuildInputs = [ python3Packages.pbr ];
@ -227,28 +197,24 @@ rec {
postPatch = ''
substituteInPlace requirements.txt \
--replace "jinja2==2.11.3" "jinja2"
--replace "jinja2==2.10.3" "jinja2"
substituteInPlace requirements.txt \
--replace "future==0.18.2" "future"
substituteInPlace requirements.txt \
--replace "ramda==0.5.5" "ramda"
substituteInPlace requirements.txt \
--replace "colorama==0.4.3" "colorama"
substituteInPlace requirements.txt \
--replace "toolz==0.10.0" "toolz"
substituteInPlace requirements.txt \
--replace "pyserial==3.4" "pyserial"
'';
checkInputs = [ python3Packages.pytest python3Packages.pytest-timeout python3Packages.pytest-flake8 ];
checkInputs = [ python3Packages.pytest python3Packages.pytest-flake8 ];
checkPhase = "pytest";
preBuild = ''
export PBR_VERSION=0.0.1
'';
meta = with lib; {
meta = with stdenv.lib; {
description = "AXI support for Migen/MiSoC";
homepage = "https://github.com/peteut/migen-axi";
license = licenses.mit;
@ -269,7 +235,7 @@ rec {
# Non-standard test suite. Needs custom checkPhase.
doCheck = false;
meta = with lib; {
meta = with stdenv.lib; {
description = "Portable tool for executing LLVM and Clang style test suites";
homepage = http://llvm.org/docs/CommandGuide/lit.html;
license = licenses.ncsa;
@ -288,7 +254,7 @@ rec {
};
prePatch = "echo ${version} > RELEASE-VERSION";
meta = with lib; {
meta = with stdenv.lib; {
description = "A tool for checking tool output inspired by LLVM's FileCheck";
homepage = "https://github.com/stp/OutputCheck";
license = licenses.bsd3;


@ -0,0 +1,33 @@
diff --git a/pythonparser/lexer.py b/pythonparser/lexer.py
index a62eaf1..2c48d36 100644
--- a/pythonparser/lexer.py
+++ b/pythonparser/lexer.py
@@ -79,6 +79,7 @@ class Lexer:
(3, 4): _reserved_3_1,
(3, 5): _reserved_3_5,
(3, 6): _reserved_3_5,
+ (3, 7): _reserved_3_5,
}
"""
A map from a tuple (*major*, *minor*) corresponding to Python version to
@@ -102,6 +103,7 @@ class Lexer:
(3, 4): _string_prefixes_3_3,
(3, 5): _string_prefixes_3_3,
(3, 6): _string_prefixes_3_6,
+ (3, 7): _string_prefixes_3_6,
}
"""
A map from a tuple (*major*, *minor*) corresponding to Python version to
diff --git a/pythonparser/parser.py b/pythonparser/parser.py
index 10c741d..f748695 100644
--- a/pythonparser/parser.py
+++ b/pythonparser/parser.py
@@ -419,7 +419,7 @@ class Parser(object):
self.expr_stmt_1 = self.expr_stmt_1__26
self.yield_expr = self.yield_expr__26
return
- elif version in ((3, 0), (3, 1), (3, 2), (3, 3), (3, 4), (3, 5), (3, 6)):
+ elif version in ((3, 0), (3, 1), (3, 2), (3, 3), (3, 4), (3, 5), (3, 6), (3, 7)):
if version == (3, 0):
self.with_stmt = self.with_stmt__26 # lol
else:


@ -1,4 +1,4 @@
{ stdenv, lib, makeWrapper, bash, buildRustPackage, curl, darwin
{ stdenv, makeWrapper, bash, buildRustPackage, curl, darwin
, version
, src
, platform
@ -6,7 +6,7 @@
}:
let
inherit (lib) optionalString;
inherit (stdenv.lib) optionalString;
inherit (darwin.apple_sdk.frameworks) Security;
bootstrapping = versionType == "bootstrap";
@ -26,14 +26,14 @@ rec {
inherit version;
inherit src;
meta = with lib; {
meta = with stdenv.lib; {
homepage = http://www.rust-lang.org/;
description = "A safe, concurrent, practical language";
maintainers = with maintainers; [ sb0 ];
license = [ licenses.mit licenses.asl20 ];
};
buildInputs = [ bash ] ++ lib.optional stdenv.isDarwin Security;
buildInputs = [ bash ] ++ stdenv.lib.optional stdenv.isDarwin Security;
postPatch = ''
patchShebangs .
@ -60,7 +60,7 @@ rec {
install_name_tool -change /usr/lib/libresolv.9.dylib '${darwin.libresolv}/lib/libresolv.9.dylib' "$out/bin/rustdoc"
install_name_tool -change /usr/lib/libiconv.2.dylib '${darwin.libiconv}/lib/libiconv.2.dylib' "$out/bin/cargo"
install_name_tool -change /usr/lib/libresolv.9.dylib '${darwin.libresolv}/lib/libresolv.9.dylib' "$out/bin/cargo"
install_name_tool -change /usr/lib/libcurl.4.dylib '${lib.getLib curl}/lib/libcurl.4.dylib' "$out/bin/cargo"
install_name_tool -change /usr/lib/libcurl.4.dylib '${stdenv.lib.getLib curl}/lib/libcurl.4.dylib' "$out/bin/cargo"
for f in $out/lib/lib*.dylib; do
install_name_tool -change /usr/lib/libresolv.9.dylib '${darwin.libresolv}/lib/libresolv.9.dylib' "$f"
done
@ -80,14 +80,14 @@ rec {
inherit version;
inherit src;
meta = with lib; {
meta = with stdenv.lib; {
homepage = http://www.rust-lang.org/;
description = "A safe, concurrent, practical language";
maintainers = with maintainers; [ sb0 ];
license = [ licenses.mit licenses.asl20 ];
};
buildInputs = [ makeWrapper bash ] ++ lib.optional stdenv.isDarwin Security;
buildInputs = [ makeWrapper bash ] ++ stdenv.lib.optional stdenv.isDarwin Security;
postPatch = ''
patchShebangs .
@ -107,7 +107,7 @@ rec {
${optionalString (stdenv.isDarwin && bootstrapping) ''
install_name_tool -change /usr/lib/libiconv.2.dylib '${darwin.libiconv}/lib/libiconv.2.dylib' "$out/bin/cargo"
install_name_tool -change /usr/lib/libresolv.9.dylib '${darwin.libresolv}/lib/libresolv.9.dylib' "$out/bin/cargo"
install_name_tool -change /usr/lib/libcurl.4.dylib '${lib.getLib curl}/lib/libcurl.4.dylib' "$out/bin/cargo"
install_name_tool -change /usr/lib/libcurl.4.dylib '${stdenv.lib.getLib curl}/lib/libcurl.4.dylib' "$out/bin/cargo"
''}
wrapProgram "$out/bin/cargo" \


@ -1,4 +1,4 @@
{ stdenv, lib, file, curl, pkgconfig, python, openssl, cmake, zlib
{ stdenv, file, curl, pkgconfig, python, openssl, cmake, zlib
, makeWrapper, libiconv, cacert, rustPlatform, rustc, libgit2
, fetchurl
}:
@ -6,12 +6,11 @@
rustPlatform.buildRustPackage rec {
# Note: we can't build cargo 1.28.0 because rustc tightened the borrow checker rules and broke
# backward compatibility, which affects old cargo versions.
# There are also issues with asm/llvm_asm with recent rustc and cargo versions prior to 1.39.
pname = "cargo";
version = "1.39.0";
name = "cargo-${version}";
version = "1.37.0";
src = fetchurl {
url = "https://static.rust-lang.org/dist/rustc-1.39.0-src.tar.gz";
sha256 = "0mwkc1bnil2cfyf6nglpvbn2y0zfbv44zfhsd5qg4c9rm6vgd8dl";
url = "https://static.rust-lang.org/dist/rustc-1.37.0-src.tar.gz";
sha256 = "1hrqprybhkhs6d9b5pjskfnc5z9v2l2gync7nb39qjb5s0h703hj";
};
# the rust source tarball already has all the dependencies vendored, no need to fetch them again
@ -52,7 +51,7 @@ rustPlatform.buildRustPackage rec {
# Disable check phase as there are failures (4 tests fail)
doCheck = false;
meta = with lib; {
meta = with stdenv.lib; {
homepage = https://crates.io;
description = "Downloads your Rust project's dependencies and builds your project";
maintainers = with maintainers; [ wizeman retrry ];


@ -1,4 +1,4 @@
{ stdenv, lib, targetPackages
{ stdenv, targetPackages
, fetchurl, file, python2, tzdata, ps
, llvm-or1k, ncurses, zlib, darwin, rustPlatform, git, cmake, curl
, which, libffi, gdb
@ -14,7 +14,7 @@
}:
let
inherit (lib) optional optionalString;
inherit (stdenv.lib) optional optionalString;
inherit (darwin.apple_sdk.frameworks) Security;
target = builtins.replaceStrings [" "] [","] (builtins.toString targets);
@ -26,7 +26,7 @@ let
in
stdenv.mkDerivation {
pname = "rustc";
name = "rustc-${version}";
inherit version;
inherit src;
@ -171,7 +171,7 @@ stdenv.mkDerivation {
# https://github.com/rust-lang/rust/issues/30181
# enableParallelBuilding = false;
meta = with lib; {
meta = with stdenv.lib; {
homepage = https://www.rust-lang.org/;
description = "A safe, concurrent, practical language";
maintainers = with maintainers; [ sb0 ];


@ -1,20 +0,0 @@
{ pkgs }:
let
rustManifest = pkgs.fetchurl {
url = "https://static.rust-lang.org/dist/2021-01-29/channel-rust-nightly.toml";
sha256 = "sha256-EZKgw89AH4vxaJpUHmIMzMW/80wAFQlfcxRoBD9nz0c=";
};
targets = [];
rustChannelOfTargets = _channel: _date: targets:
(pkgs.lib.rustLib.fromManifestFile rustManifest {
inherit (pkgs) stdenv lib fetchurl patchelf;
}).rust.override {
inherit targets;
extensions = ["rust-src"];
};
rust = rustChannelOfTargets "nightly" null targets;
in
pkgs.recurseIntoAttrs (pkgs.makeRustPlatform {
rustc = rust;
cargo = rust;
})


@ -4,14 +4,13 @@ let
artiqpkgs = import ./default.nix { inherit pkgs; };
vivado = import ./vivado.nix { inherit pkgs; };
in
assert pkgs.lib.asserts.assertMsg (!pkgs.lib.strings.versionAtLeast artiqpkgs.artiq.version "7.0") "For ARTIQ 7+, use 'nix develop' on the flake instead.";
pkgs.mkShell {
buildInputs = [
vivado
pkgs.gnumake
(pkgs.python3.withPackages(ps: (with ps; [ jinja2 jsonschema numpy paramiko ]) ++ (with artiqpkgs; [ migen microscope misoc jesd204b migen-axi artiq ])))
artiqpkgs.cargo-legacy
artiqpkgs.rustc-legacy
(pkgs.python3.withPackages(ps: (with ps; [ jinja2 numpy paramiko ]) ++ (with artiqpkgs; [ migen microscope misoc jesd204b migen-axi artiq ])))
artiqpkgs.cargo
artiqpkgs.rustc
artiqpkgs.binutils-or1k
artiqpkgs.binutils-arm
artiqpkgs.llvm-or1k


@ -1,7 +1,7 @@
# Install Vivado in /opt and add to /etc/nixos/configuration.nix:
# nix.sandboxPaths = ["/opt"];
{ pkgs, vivadoPath ? "/opt/Xilinx/Vivado/2021.2" }:
{ pkgs, vivadoPath ? "/opt/Xilinx/Vivado/2020.1" }:
pkgs.buildFHSUserEnv {
name = "vivado";
@ -17,7 +17,7 @@ pkgs.buildFHSUserEnv {
xorg.libXext
xorg.libXtst
xorg.libXi
] ++ ( if pkgs ? libxcrypt-legacy then [ pkgs.libxcrypt-legacy ] else [])
]
);
profile = "source ${vivadoPath}/settings64.sh";
runScript = "vivado";


@ -1,16 +0,0 @@
{ pkgs }:
let
# Pin nixpkgs to avoid frequent resource-intensive Windows reinstallations on Hydra.
wfvm-pkgs = pkgs.fetchFromGitHub {
owner = "NixOS";
repo = "nixpkgs";
rev = "f8248ab6d9e69ea9c07950d73d48807ec595e923";
sha256 = "009i9j6mbq6i481088jllblgdnci105b2q4mscprdawg3knlyahk";
};
wfvm = pkgs.fetchgit {
url = "https://git.m-labs.hk/M-Labs/wfvm.git";
rev = "4b497938ffd9fcddf84a3dbe2f01524395292adb";
sha256 = "0m3kdbbcskqc1lf8b5f7ccbll9b7vkl4r00kbyx3yjb2rs6cqvil";
};
in import "${wfvm}/wfvm" { pkgs = (import wfvm-pkgs {}); }


@ -1,16 +0,0 @@
{ pkgs }:
let
# Pin nixpkgs to avoid frequent resource-intensive Windows reinstallations on Hydra.
wfvm-pkgs = pkgs.fetchFromGitHub {
owner = "NixOS";
repo = "nixpkgs";
rev = "f8248ab6d9e69ea9c07950d73d48807ec595e923";
sha256 = "009i9j6mbq6i481088jllblgdnci105b2q4mscprdawg3knlyahk";
};
wfvm = pkgs.fetchgit {
url = "https://git.m-labs.hk/M-Labs/wfvm.git";
rev = "c7d9060eeef46bebaf376c95ca37c7a65a2ea896";
sha256 = "022fb7zpn48hg9qihmqmzqdphks7b7cbnw6f5s1qy1in5c7f8rx9";
};
in import "${wfvm}/wfvm" { pkgs = (import wfvm-pkgs {}); }

artiq-fast/wfvm/README.md

@ -0,0 +1,26 @@
# Preparation steps
## Install a Windows image
1. Adjust `demo-image.nix` accordingly
2. Run one of the following:
If in impure mode:
```shell
nix-build demo-image.nix
./result
```
This results in a file called `c.img`.
If in pure mode:
```shell
nix-build demo-image.nix
ls -la ./result
```
This results in a symlink to the image in the Nix store.
# Impure/pure mode
Sometimes it can be useful to build the image _outside_ of the Nix sandbox for debugging purposes.
For this purpose, we have an attribute called `impureMode`, which outputs the shell script used by Nix inside the sandbox to build the image.
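As a rough illustration only (the attribute path and the way the script is invoked are assumptions based on the description above, not taken from wfvm's documentation), building and running the `impureMode` script outside the sandbox could look like this:
```shell
# Sketch only: assumes demo-image.nix evaluates to a derivation that exposes
# the executable `impureMode` script described above.
nix-build demo-image.nix -A impureMode -o impure-builder

# Run the emitted script outside the Nix sandbox, e.g. to debug individual
# installation steps without a full sandboxed rebuild.
./impure-builder
```
The pure `nix-build` invocation remains the reproducible path; the impure script is only intended for debugging, as noted above.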


@ -0,0 +1,317 @@
{ pkgs
, fullName ? "John Doe"
, organization ? "KVM Authority"
, administratorPassword ? "123456"
, uiLanguage ? "en-US"
, inputLocale ? "en-US"
, userLocale ? "en-US"
, systemLocale ? "en-US"
, users ? {}
, productKey ? null
, defaultUser ? "wfvm"
, setupCommands ? []
, timeZone ? "UTC"
, services ? {}
, impureShellCommands ? []
, driveLetter ? "E:"
, ...
}:
let
lib = pkgs.lib;
serviceCommands = lib.mapAttrsToList (
serviceName: attrs: "powershell Set-Service -Name ${serviceName} " + (
lib.concatStringsSep " " (
(
lib.mapAttrsToList (
n: v: if builtins.typeOf v != "bool" then "-${n} ${v}" else "-${n}"
)
) (
# Always run without interaction
{ Force = true; } // attrs
)
)
)
) services;
sshSetupCommands =
# let
# makeDirs = lib.mapAttrsToList (n: v: ''mkdir C:\Users\${n}\.ssh'') users;
# writeKeys = lib.flatten (lib.mapAttrsToList (n: v: builtins.map (key: let
# commands = [
# ''powershell.exe Set-Content -Path C:\Users\${n}\.ssh\authorized_keys -Value '${key}' ''
# ];
# in lib.concatStringsSep "\n" commands) (v.sshKeys or [])) users);
# mkDirsDesc = builtins.map (c: {Path = c; Description = "Make SSH key dir";}) makeDirs;
# writeKeysDesc = builtins.map (c: {Path = c; Description = "Add SSH key";}) writeKeys;
# in
# mkDirsDesc ++ writeKeysDesc ++
[
{
Path = ''powershell.exe Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 -Source ${driveLetter}\fod -LimitAccess'';
Description = "Add OpenSSH service.";
}
{
Path = ''powershell.exe Set-Service -Name sshd -StartupType Automatic'';
Description = "Enable SSH by default.";
}
];
assertCommand = c: builtins.typeOf c == "string" || builtins.typeOf c == "set" && builtins.hasAttr "Path" c && builtins.hasAttr "Description" c;
commands = builtins.map (x: assert assertCommand x; if builtins.typeOf x == "string" then { Path = x; Description = x; } else x) (
[
{
Path = "powershell.exe Set-ExecutionPolicy -Force Unrestricted";
Description = "Allow unsigned powershell scripts.";
}
]
++ [
{
Path = ''powershell.exe ${driveLetter}\win-bundle-installer.exe'';
Description = "Install any declared packages.";
}
]
++ setupCommands
++ [
{
Path = ''powershell.exe ${driveLetter}\ssh-setup.ps1'';
Description = "Setup SSH and keys";
}
]
++ serviceCommands
++ impureShellCommands
);
mkCommand = attrs: ''
<RunSynchronousCommand wcm:action="add">
${lib.concatStringsSep "\n" (lib.attrsets.mapAttrsToList (n: v: "<${n}>${v}</${n}>") attrs)}
</RunSynchronousCommand>
'';
mkCommands = commands: (
builtins.foldl' (
acc: v: rec {
i = acc.i + 1;
values = acc.values ++ [ (mkCommand (v // { Order = builtins.toString i; })) ];
}
) {
i = 0;
values = [];
} commands
).values;
mkUser =
{ name
, password
, description ? ""
, displayName ? ""
, groups ? []
# , sshKeys ? [] # Handled in scripts
}: ''
<LocalAccount wcm:action="add">
<Password>
<Value>${password}</Value>
<PlainText>true</PlainText>
</Password>
<Description>${description}</Description>
<DisplayName>${displayName}</DisplayName>
<Group>${builtins.concatStringsSep ";" (lib.unique ([ "Users" ] ++ groups))}</Group>
<Name>${name}</Name>
</LocalAccount>
'';
# Windows expects a flat list of users while we want to manage them as a set
flatUsers = builtins.attrValues (builtins.mapAttrs (name: s: s // { inherit name; }) users);
autounattendXML = pkgs.writeText "autounattend.xml" ''
<?xml version="1.0" encoding="utf-8"?>
<unattend xmlns="urn:schemas-microsoft-com:unattend">
<settings pass="windowsPE">
<component name="Microsoft-Windows-PnpCustomizationsWinPE" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<DriverPaths>
<PathAndCredentials wcm:action="add" wcm:keyValue="1">
<Path>D:\</Path>
</PathAndCredentials>
<PathAndCredentials wcm:action="add" wcm:keyValue="2">
<Path>E:\</Path>
</PathAndCredentials>
</DriverPaths>
</component>
<component name="Microsoft-Windows-Setup" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<DiskConfiguration>
<Disk wcm:action="add">
<CreatePartitions>
<CreatePartition wcm:action="add">
<Order>1</Order>
<Type>EFI</Type>
<Size>100</Size>
</CreatePartition>
<CreatePartition wcm:action="add">
<Order>2</Order>
<Type>MSR</Type>
<Size>16</Size>
</CreatePartition>
<CreatePartition wcm:action="add">
<Order>3</Order>
<Type>Primary</Type>
<Extend>true</Extend>
</CreatePartition>
</CreatePartitions>
<ModifyPartitions>
<ModifyPartition wcm:action="add">
<Order>1</Order>
<Format>FAT32</Format>
<Label>System</Label>
<PartitionID>1</PartitionID>
</ModifyPartition>
<ModifyPartition wcm:action="add">
<Order>2</Order>
<PartitionID>2</PartitionID>
</ModifyPartition>
<ModifyPartition wcm:action="add">
<Order>3</Order>
<Format>NTFS</Format>
<Label>Windows</Label>
<Letter>C</Letter>
<PartitionID>3</PartitionID>
</ModifyPartition>
</ModifyPartitions>
<DiskID>0</DiskID>
<WillWipeDisk>true</WillWipeDisk>
</Disk>
</DiskConfiguration>
<ImageInstall>
<OSImage>
<InstallTo>
<DiskID>0</DiskID>
<PartitionID>3</PartitionID>
</InstallTo>
<InstallFrom>
<MetaData wcm:action="add">
<Key>/IMAGE/INDEX</Key>
<Value>1</Value>
</MetaData>
</InstallFrom>
</OSImage>
</ImageInstall>
<UserData>
<ProductKey>
${if productKey != null then "<Key>${productKey}</Key>" else ""}
<WillShowUI>OnError</WillShowUI>
</ProductKey>
<AcceptEula>true</AcceptEula>
<FullName>${fullName}</FullName>
<Organization>${organization}</Organization>
</UserData>
</component>
<component name="Microsoft-Windows-International-Core-WinPE" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<SetupUILanguage>
<UILanguage>${uiLanguage}</UILanguage>
</SetupUILanguage>
<InputLocale>${inputLocale}</InputLocale>
<SystemLocale>${systemLocale}</SystemLocale>
<UILanguage>${uiLanguage}</UILanguage>
<UILanguageFallback>en-US</UILanguageFallback>
<UserLocale>${userLocale}</UserLocale>
</component>
</settings>
<settings pass="oobeSystem">
<component name="Microsoft-Windows-International-Core" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<InputLocale>${inputLocale}</InputLocale>
<SystemLocale>${systemLocale}</SystemLocale>
<UILanguage>${uiLanguage}</UILanguage>
<UILanguageFallback>en-US</UILanguageFallback>
<UserLocale>${userLocale}</UserLocale>
</component>
<component name="Microsoft-Windows-Shell-Setup" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<OOBE>
<HideEULAPage>true</HideEULAPage>
<HideLocalAccountScreen>true</HideLocalAccountScreen>
<HideOEMRegistrationScreen>true</HideOEMRegistrationScreen>
<HideOnlineAccountScreens>true</HideOnlineAccountScreens>
<HideWirelessSetupInOOBE>true</HideWirelessSetupInOOBE>
<ProtectYourPC>1</ProtectYourPC>
</OOBE>
<TimeZone>${timeZone}</TimeZone>
<UserAccounts>
${if administratorPassword != null then ''
<AdministratorPassword>
<Value>${administratorPassword}</Value>
<PlainText>true</PlainText>
</AdministratorPassword>
'' else ""}
<LocalAccounts>
${builtins.concatStringsSep "\n" (builtins.map mkUser flatUsers)}
</LocalAccounts>
</UserAccounts>
${if defaultUser == null then "" else ''
<AutoLogon>
<Password>
<Value>${(builtins.getAttr defaultUser users).password}</Value>
<PlainText>true</PlainText>
</Password>
<Enabled>true</Enabled>
<Username>${defaultUser}</Username>
</AutoLogon>
''}
<FirstLogonCommands>
<SynchronousCommand wcm:action="add">
<Order>1</Order>
<CommandLine>cmd /C shutdown /s /f /t 00</CommandLine>
<Description>ChangeHideFiles</Description>
</SynchronousCommand>
</FirstLogonCommands>
</component>
</settings>
<settings pass="specialize">
<component name="Microsoft-Windows-Deployment" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<RunSynchronous>
${lib.concatStringsSep "\n" (mkCommands commands)}
</RunSynchronous>
</component>
<component name="Microsoft-Windows-SQMApi" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="NonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<CEIPEnabled>0</CEIPEnabled>
</component>
</settings>
<!-- Disable Windows UAC -->
<settings pass="offlineServicing">
<component name="Microsoft-Windows-LUA-Settings" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<EnableLUA>false</EnableLUA>
</component>
</settings>
<cpi:offlineImage cpi:source="wim:c:/wim/windows-10/install.wim#Windows 10 Enterprise LTSC 2019 Evaluation" xmlns:cpi="urn:schemas-microsoft-com:cpi" />
</unattend>
'';
in {
# Lint and format as a sanity check
autounattendXML = pkgs.runCommandNoCC "autounattend.xml" {} ''
${pkgs.libxml2}/bin/xmllint --format ${autounattendXML} > $out
'';
# autounattend.xml is _super_ picky about quotes and other things
setupScript = pkgs.writeText "ssh-setup.ps1" (
''
# Setup SSH and keys
'' +
lib.concatStrings (
builtins.map (c: ''
# ${c.Description}
${c.Path}
'') sshSetupCommands
)
);
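# For illustration, with the sshSetupCommands defined above the generated
# ssh-setup.ps1 reads roughly:
#   # Setup SSH and keys
#   # Add OpenSSH service.
#   powershell.exe Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 -Source <driveLetter>\fod -LimitAccess
#   # Enable SSH by default.
#   powershell.exe Set-Service -Name sshd -StartupType Automatic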
}

View File

@ -0,0 +1,7 @@
{ pkgs }:
pkgs.runCommandNoCC "win-bundle-installer.exe" {} ''
cp ${./main.go} main.go
env HOME=$(mktemp -d) GOOS=windows GOARCH=amd64 ${pkgs.go}/bin/go build
mv build.exe $out
''

View File

@ -0,0 +1,116 @@
package main
import (
"archive/tar"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
)
func Untar(dst string, r io.Reader) error {
tr := tar.NewReader(r)
for {
header, err := tr.Next()
switch {
case err == io.EOF:
return nil
case err != nil:
return err
case header == nil:
continue
}
target := filepath.Join(dst, header.Name)
switch header.Typeflag {
case tar.TypeDir:
if _, err := os.Stat(target); err != nil {
if err := os.MkdirAll(target, 0755); err != nil {
return err
}
}
case tar.TypeReg:
f, err := os.OpenFile(target, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode))
if err != nil {
return err
}
if _, err := io.Copy(f, tr); err != nil {
return err
}
f.Close()
}
}
}
func InstallBundle(bundlePath string) error {
reader, err := os.Open(bundlePath)
if err != nil {
log.Fatal(err)
}
workDir, err := ioutil.TempDir("", "bundle_install")
if err != nil {
return err
}
defer os.RemoveAll(workDir)
err = Untar(workDir, reader)
if err != nil {
return err
}
installScript := filepath.Join(workDir, "install.ps1")
cmd := exec.Command("powershell", installScript)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Dir = workDir
err = cmd.Run()
return err
}
func main() {
// Get path relative to binary
baseDir, err := filepath.Abs(filepath.Dir(os.Args[0]))
if err != nil {
log.Fatal(err)
}
var dirs = [2]string{"bootstrap", "user"}
for _, pkgDir := range dirs {
dir := filepath.Join(baseDir, pkgDir)
files, err := ioutil.ReadDir(dir)
if err != nil {
log.Fatal(err)
}
for _, file := range files {
bundle := filepath.Join(dir, file.Name())
fmt.Printf("Installing: %s\n", bundle)
err := InstallBundle(bundle)
if err != nil {
log.Fatal(err)
}
}
}
}

View File

@ -0,0 +1,13 @@
{ pkgs ? import <nixpkgs> {} }:
pkgs.mkShell {
buildInputs = [
pkgs.go
];
shellHook = ''
unset GOPATH
'';
}

View File

@ -0,0 +1,7 @@
{ pkgs }:
{
makeWindowsImage = attrs: import ./win.nix ({ inherit pkgs; } // attrs);
layers = (import ./layers { inherit pkgs; });
utils = (import ./utils.nix { inherit pkgs; });
}

View File

@ -0,0 +1,59 @@
{ pkgs ? import <nixpkgs> {}, impureMode ? false }:
let
wfvm = (import ./default.nix { inherit pkgs; });
in
wfvm.makeWindowsImage {
# Build install script & skip building iso
inherit impureMode;
# Custom base iso
# windowsImage = pkgs.fetchurl {
# url = "https://software-download.microsoft.com/download/sg/17763.107.101029-1455.rs5_release_svc_refresh_CLIENT_LTSC_EVAL_x64FRE_en-us.iso";
# sha256 = "668fe1af70c2f7416328aee3a0bb066b12dc6bbd2576f40f812b95741e18bc3a";
# };
# impureShellCommands = [
# "powershell.exe echo Hello"
# ];
# User accounts
# users = {
# artiq = {
# password = "1234";
# # description = "Default user";
# # displayName = "Display name";
# groups = [
# "Administrators"
# ];
# };
# };
# Auto login
# defaultUser = "artiq";
# fullName = "M-Labs";
# organization = "m-labs";
# administratorPassword = "12345";
# Imperative installation commands, to be installed incrementally
installCommands = with wfvm.layers; [ anaconda3 msys2 msys2-packages ];
# services = {
# # Enable remote management
# WinRm = {
# Status = "Running";
# PassThru = true;
# };
# };
# License key
# productKey = "iboughtthisone";
# Locales
# uiLanguage = "en-US";
# inputLocale = "en-US";
# userLocale = "en-US";
# systemLocale = "en-US";
}

View File

@ -0,0 +1,13 @@
{ pkgs ? import <nixpkgs> {} }:
let
wfvm = (import ./default.nix { inherit pkgs; });
in
wfvm.utils.wfvm-run {
name = "demo-ssh";
image = import ./demo-image.nix { inherit pkgs; };
isolateNetwork = false;
script = ''
${pkgs.sshpass}/bin/sshpass -p1234 -- ${pkgs.openssh}/bin/ssh -p 2022 wfvm@localhost -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null
'';
}

View File

@ -0,0 +1,107 @@
{ pkgs }:
{
anaconda3 = {
name = "Anaconda3";
script = let
Anaconda3 = pkgs.fetchurl {
name = "Anaconda3.exe";
url = "https://repo.anaconda.com/archive/Anaconda3-2020.02-Windows-x86_64.exe";
sha256 = "0n31l8l89jrjrbzbifxbjnr3g320ly9i4zfyqbf3l9blf4ygbhl3";
};
in ''
ln -s ${Anaconda3} ./Anaconda3.exe
win-put Anaconda3.exe 'C:\Users\wfvm'
echo Running Anaconda installer...
win-exec 'start /wait "" .\Anaconda3.exe /S /D=%UserProfile%\Anaconda3'
echo Anaconda installer finished
'';
};
msys2 = {
name = "MSYS2";
buildInputs = [ pkgs.expect ];
script = let
msys2 = pkgs.fetchurl {
name = "msys2.exe";
url = "https://github.com/msys2/msys2-installer/releases/download/2020-06-02/msys2-x86_64-20200602.exe";
sha256 = "1mswlfybvk42vdr4r85dypgkwhrp5ff47gcbxgjqwq86ym44xzd4";
};
msys2-auto-install = pkgs.fetchurl {
url = "https://raw.githubusercontent.com/msys2/msys2-installer/master/auto-install.js";
sha256 = "0ww48xch2q427c58arg5llakfkfzh3kb32kahwplp0s7jc8224g7";
};
in ''
ln -s ${msys2} ./msys2.exe
ln -s ${msys2-auto-install} ./auto-install.js
win-put msys2.exe 'C:\Users\wfvm'
win-put auto-install.js 'C:\Users\wfvm'
echo Running MSYS2 installer...
# work around MSYS2 installer bug that prevents it from closing at the end of unattended install
expect -c 'set timeout 600; spawn win-exec ".\\msys2.exe --script auto-install.js -v InstallPrefix=C:\\msys64"; expect FinishedPageCallback { close }'
echo MSYS2 installer finished
'';
};
msys2-packages = {
name = "MSYS2-packages";
script = let
msys-packages = import ./msys_packages.nix { inherit pkgs; };
msys-packages-put = pkgs.lib.strings.concatStringsSep "\n"
(map (package: ''win-put ${package} 'C:\Users\wfvm\msyspackages' '') msys-packages);
in
# The Windows command line cannot expand globs, so wrap the pacman call in a batch file that delegates the wildcard to bash
''
win-exec 'mkdir msyspackages'
${msys-packages-put}
cat > installmsyspackages.bat << EOF
set MSYS=c:\msys64
set ARCH=64
set PATH=%MSYS%\usr\bin;%MSYS%\mingw%ARCH%\bin;%PATH%
bash -c "pacman -U --noconfirm C:/Users/wfvm/msyspackages/*"
EOF
win-put installmsyspackages.bat 'C:\Users\wfvm'
win-exec installmsyspackages
'';
};
cmake = {
name = "CMake";
script = let
cmake = pkgs.fetchurl {
name = "cmake.msi";
url = "https://github.com/Kitware/CMake/releases/download/v3.18.0-rc1/cmake-3.18.0-rc1-win64-x64.msi";
sha256 = "1n32jzbg9w3vfbvyi9jqijz97gn1zsk1w5226wlrxd2a9d4w1hrn";
};
in
''
ln -s ${cmake} cmake.msi
win-put cmake.msi
win-exec "msiexec.exe /q /i cmake.msi ADD_CMAKE_TO_PATH=System"
'';
};
msvc = {
name = "MSVC";
script = let
msvc-wine = pkgs.fetchFromGitHub {
owner = "mstorsjo";
repo = "msvc-wine";
rev = "b953f996401c19df3039c04e4ac7f962e435a4b2";
sha256 = "12rqx0r3d836x4k1ccda5xmzsd2938v5gmrp27awmzv1j3wplfsq";
};
vs = pkgs.stdenv.mkDerivation {
name = "vs";
outputHashAlgo = "sha256";
outputHashMode = "recursive";
outputHash = "1ngq7mg02kzfysh559j3fkjh2hngmay4jjar55p2db4d9rkvqh22";
src = msvc-wine;
phases = [ "buildPhase" ];
buildInputs = [ pkgs.cacert (pkgs.python3.withPackages(ps: [ ps.simplejson ps.six ])) pkgs.msitools ];
buildPhase = "python $src/vsdownload.py --accept-license --dest $out";
};
in
''
win-put ${vs}/VC/Tools/MSVC 'C:\'
win-exec 'setx PATH C:\MSVC\14.26.28801\bin\Hostx64\x64;%PATH% /m'
'';
};
}

View File

@ -0,0 +1,40 @@
#!/usr/bin/env bash
set -e
nix-build -E "
let
pkgs = import <nixpkgs> {};
wfvm = import ../default.nix { inherit pkgs; };
in
wfvm.utils.wfvm-run {
name = \"get-msys-packages\";
image = wfvm.makeWindowsImage { installCommands = [ wfvm.layers.msys2 ]; };
script = ''
cat > getmsyspackages.bat << EOF
set MSYS=C:\\MSYS64
set TOOLPREF=mingw-w64-x86_64-
set PATH=%MSYS%\usr\bin;%MSYS%\mingw64\bin;%PATH%
pacman -Sp %TOOLPREF%gcc %TOOLPREF%binutils make autoconf automake libtool texinfo %TOOLPREF%make %TOOLPREF%cmake > packages.txt
EOF
\${wfvm.utils.win-put}/bin/win-put getmsyspackages.bat
\${wfvm.utils.win-exec}/bin/win-exec getmsyspackages
\${wfvm.utils.win-get}/bin/win-get packages.txt
'';
}
"
./result/bin/wfvm-run-get-msys-packages
echo "{ pkgs } : [" > msys_packages.nix
while read package; do
hash=$(nix-prefetch-url $package)
echo "
(pkgs.fetchurl {
url = \"$package\";
sha256 = \"$hash\";
})" >> msys_packages.nix
done < packages.txt
echo "]" >> msys_packages.nix
rm result getmsyspackages.bat packages.txt

View File

@ -174,4 +174,159 @@
url = "http://repo.msys2.org/msys/x86_64/texinfo-6.7-1-x86_64.pkg.tar.xz";
sha256 = "0c50809yg9g95m8yib867q8m28sjabqppz2qbzh3gr83z55kknnw";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-expat-2.2.9-1-any.pkg.tar.xz";
sha256 = "16fz2r902mmc0kka3pm7g54xjd8x3q07bi7y54vzpbmic31rrvh4";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-gettext-0.19.8.1-8-any.pkg.tar.xz";
sha256 = "1g28871qgc66k4csmc4rk4vcajzw5wavicc2x3iw4pnigh9vsj83";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-make-4.3-1-any.pkg.tar.xz";
sha256 = "0v133ip1r3djcki5znn946r1c81vvyc6xk5xf35ad8b30wmlfqvq";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-pkg-config-0.29.2-1-any.pkg.tar.xz";
sha256 = "1w6s9nb7kjwnlz2vgimzvyjmay47d6g008c82xab4k8nhd7nm77n";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-c-ares-1.16.1-1-any.pkg.tar.zst";
sha256 = "13sfv0cs4rj3vw4y9pibp02qvvcv5qnzs87282m7pxxnjzccv9an";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-brotli-1.0.7-4-any.pkg.tar.xz";
sha256 = "02i5jxmwbvraszy5rm31gm6wi21vclzsbqq9rx4qxjdgjwgn4rfl";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libunistring-0.9.10-1-any.pkg.tar.xz";
sha256 = "1q03qjyndbv65j0w71x41gc7nhdcbmdsc5xb882gmzlgwrdi77hq";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libidn2-2.3.0-1-any.pkg.tar.xz";
sha256 = "06523dq5q3dq07iz6f11pwk3b4v18z3b72ly3wvxl0kdy89khqjj";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libmetalink-0.1.3-3-any.pkg.tar.xz";
sha256 = "1nvjvygcxmrb7xlqzxym3g6vhz31nr83qx2vfsqrc0haw4r08d5j";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libpsl-0.21.0-2-any.pkg.tar.xz";
sha256 = "13456p4kl53i49hz6b9cpjbkb19k4443nksbii9c29x09lagbzwv";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libtasn1-4.16.0-1-any.pkg.tar.xz";
sha256 = "0aziyg127l9742g7i8dl4ffp80v55272i8p3jqk3pvz8qaf8dfyh";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libffi-3.3-1-any.pkg.tar.xz";
sha256 = "05sh8hwr171bbpjw9yf4z04sa3m4dg37kqbdz90y68glrj43i4xd";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-p11-kit-0.23.20-2-any.pkg.tar.xz";
sha256 = "02f3k46b09b4rd0fmadavjj04f4a2v1c56r9qlkr5lkjlmfm7a5a";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-ca-certificates-20190110-1-any.pkg.tar.xz";
sha256 = "1wjbm67rb07sp803dl51lfsrrih2xjnwbrif0hvsc6nq63q1i3dq";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-openssl-1.1.1.g-1-any.pkg.tar.xz";
sha256 = "047x6dxxqm8y8fj236cd3p9jk4cdnmzdp3pgh84gsqa2vgxdn64f";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libssh2-1.9.0-1-any.pkg.tar.xz";
sha256 = "10pd4mmvsrvcs4sw0v786ry3w2xwrli6prnhpwcjfjvb25jn0y9a";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-jansson-2.12-1-any.pkg.tar.xz";
sha256 = "133al0y3fg38b303934ls7f7l5f76qy7v6wx2cnxmfq2k0fxj7cc";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-jemalloc-5.2.1-1-any.pkg.tar.xz";
sha256 = "1w0mm0wlsx37gbf5vcrbf7c4hvkcrhls8a1aiq3s4vbld8maccdl";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-nghttp2-1.40.0-2-any.pkg.tar.xz";
sha256 = "0m2xww09f5j8ii6nqk8wz6g8dy1qbgvv185ikrpabpbdaqgkaijj";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-curl-7.70.0-1-any.pkg.tar.zst";
sha256 = "013b04dxcfgcbx9ccknm8rkkxp5lxzi2473li678f0n1dagcxn0d";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-jsoncpp-1.9.2-1-any.pkg.tar.xz";
sha256 = "0wdxn26lv9j9fdixcvgbg299wix9xxl48jjdnqf1387aiprhsj4m";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-bzip2-1.0.8-1-any.pkg.tar.xz";
sha256 = "1ipndg1lg96hfznhcv8ifazv07944vk387i35rzaaamac2hm7nyf";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-lz4-1.9.2-1-any.pkg.tar.xz";
sha256 = "067rm6fjziid747b9lzng4hpzlddqq8d2xfrxd95nzvj4qrj1xli";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libtre-git-r128.6fb7206-2-any.pkg.tar.xz";
sha256 = "0dp3ca83j8jlx32gml2qvqpwp5b42q8r98gf6hyiki45d910wb7x";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libsystre-1.0.1-4-any.pkg.tar.xz";
sha256 = "037gkzaaj8kp5nspcbc8ll64s9b3mj8d6m663lk1za94bq2axff1";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-nettle-3.6-1-any.pkg.tar.zst";
sha256 = "1m5kakcfmwvmvajblscq541b40f5zhc01hqgvwlcgpdm4c1mjxhx";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-xz-5.2.5-1-any.pkg.tar.xz";
sha256 = "09h7qpy8nrrk3z9fh31k9jc17449qs9cf5v183rz6v6526x3v7jg";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libarchive-3.4.3-1-any.pkg.tar.zst";
sha256 = "1c9wxa9i1hm1yvv82qdzc1pqgrw3gcfc0s9wjah7w1civ9a63flf";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libuv-1.38.0-1-any.pkg.tar.zst";
sha256 = "1x9vz2ib8mgx0g2gxjmyshdcf1qgql0d6hycyh4xf7ns4zk70mh0";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-rhash-1.3.9-1-any.pkg.tar.xz";
sha256 = "1a5b1wvljbdn38jcw2w46mcw377aw8k7j93fxsjzghhf9msscl1a";
})
(pkgs.fetchurl {
url = "http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-cmake-3.17.3-1-any.pkg.tar.zst";
sha256 = "0b9zaa11qazsgz88yfm6j93rddnw8mz9zzh8maz3vrmi9p4asldd";
})
]

View File

@ -0,0 +1 @@
This file is not publicly accessible anywhere, so it had to be extracted from a connected instance.
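A minimal sketch of how such a file can be copied out of a connected instance with the SSH helpers from utils.nix (the source path on the VM is an assumption, not something recorded here):

```shell
# Hypothetical: the cab has been staged at C:\fod on the running VM beforehand.
win-get 'C:\fod\OpenSSH-Server-Package~31bf3856ad364e35~amd64~~.cab'
```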

Binary file not shown.

artiq-fast/wfvm/utils.nix Normal file
View File

@ -0,0 +1,98 @@
{ pkgs, baseRtc ? "2020-04-20T14:21:42", cores ? "4", qemuMem ? "4G" }:
rec {
# qemu_test is a smaller closure only building for a single system arch
qemu = pkgs.qemu_test;
mkQemuFlags = extraFlags: [
"-enable-kvm"
"-cpu host"
"-smp ${cores}"
"-m ${qemuMem}"
"-bios ${pkgs.OVMF.fd}/FV/OVMF.fd"
"-vga virtio"
"-rtc base=${baseRtc}"
"-device piix3-usb-uhci"
"-device e1000,netdev=n1"
] ++ extraFlags;
# Pass empty config file to prevent ssh from failing to create ~/.ssh
sshOpts = "-F /dev/null -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ConnectTimeout=1";
win-exec = pkgs.writeShellScriptBin "win-exec" ''
${pkgs.sshpass}/bin/sshpass -p1234 -- \
${pkgs.openssh}/bin/ssh -np 2022 ${sshOpts} \
wfvm@localhost \
$1
'';
win-wait = pkgs.writeShellScriptBin "win-wait" ''
# If the machine is not up within 10 minutes it's likely never coming up
timeout=600
# Wait for VM to be accessible
sleep 20
echo "Waiting for SSH..."
while true; do
if test "$timeout" -eq 0; then
echo "SSH connection timed out"
exit 1
fi
output=$(${win-exec}/bin/win-exec 'echo|set /p="Ran command"' || echo "")
if test "$output" = "Ran command"; then
break
fi
echo "Retrying in 1 second, timing out in $timeout seconds"
((timeout=$timeout-1))
sleep 1
done
echo "SSH OK"
'';
win-put = pkgs.writeShellScriptBin "win-put" ''
echo win-put $1 -\> $2
${pkgs.sshpass}/bin/sshpass -p1234 -- \
${pkgs.openssh}/bin/scp -r -P 2022 ${sshOpts} \
$1 wfvm@localhost:$2
'';
win-get = pkgs.writeShellScriptBin "win-get" ''
echo win-get $1
${pkgs.sshpass}/bin/sshpass -p1234 -- \
${pkgs.openssh}/bin/scp -r -T -P 2022 ${sshOpts} \
wfvm@localhost:$1 .
'';
wfvm-run = { name, image, script, display ? false, isolateNetwork ? true, forwardedPorts ? [] }:
let
restrict =
if isolateNetwork
then "on"
else "off";
# use socat instead of `tcp:...` to allow multiple connections
guestfwds =
builtins.concatStringsSep ""
(map ({ listenAddr, targetAddr, port }:
",guestfwd=tcp:${listenAddr}:${toString port}-cmd:${pkgs.socat}/bin/socat\\ -\\ tcp:${targetAddr}:${toString port}"
) forwardedPorts);
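# Illustration (the addresses below are made-up example values, not defaults):
#   forwardedPorts = [ { listenAddr = "192.168.1.2"; targetAddr = "127.0.0.1"; port = 8080; } ]
# appends ",guestfwd=tcp:192.168.1.2:8080-cmd:.../socat - tcp:127.0.0.1:8080" to the
# -netdev option below, so connections the guest makes to 192.168.1.2:8080 are
# relayed by socat on the host to 127.0.0.1:8080.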
qemuParams = mkQemuFlags (pkgs.lib.optional (!display) "-display none" ++ [
"-drive"
"file=${image},index=0,media=disk,cache=unsafe"
"-snapshot"
"-netdev user,id=n1,net=192.168.1.0/24,restrict=${restrict},hostfwd=tcp::2022-:22${guestfwds}"
]);
in pkgs.writeShellScriptBin "wfvm-run-${name}" ''
set -m
${qemu}/bin/qemu-system-x86_64 ${pkgs.lib.concatStringsSep " " qemuParams} &
${win-wait}/bin/win-wait
${script}
echo "Shutting down..."
${win-exec}/bin/win-exec 'shutdown /s'
echo "Waiting for VM to terminate..."
fg
echo "Done"
'';
}

artiq-fast/wfvm/win.nix Normal file
View File

@ -0,0 +1,163 @@
{ pkgs
, diskImageSize ? "22G"
, windowsImage ? null
, autoUnattendParams ? {}
, impureMode ? false
, installCommands ? []
, users ? {}
, ...
}@attrs:
let
lib = pkgs.lib;
utils = import ./utils.nix { inherit pkgs; };
libguestfs = pkgs.libguestfs-with-appliance;
# p7zip on >20.03 has known vulns but we have no better option
p7zip = pkgs.p7zip.overrideAttrs(old: {
meta = old.meta // {
knownVulnerabilities = [];
};
});
runQemuCommand = name: command: (
pkgs.runCommandNoCC name { buildInputs = [ p7zip utils.qemu libguestfs ]; }
(
''
if ! test -c /dev/kvm; then
echo "KVM not available, bailing out" >> /dev/stderr
exit 1
fi
'' + command
)
);
windowsIso = if windowsImage != null then windowsImage else pkgs.fetchurl {
url = "https://software-download.microsoft.com/download/sg/17763.107.101029-1455.rs5_release_svc_refresh_CLIENT_LTSC_EVAL_x64FRE_en-us.iso";
sha256 = "668fe1af70c2f7416328aee3a0bb066b12dc6bbd2576f40f812b95741e18bc3a";
};
openSshServerPackage = ./openssh/server-package.cab;
autounattend = import ./autounattend.nix (
attrs // {
inherit pkgs;
users = users // {
wfvm = {
password = "1234";
description = "WFVM Administrator";
groups = [
"Administrators"
];
};
};
}
);
bundleInstaller = pkgs.callPackage ./bundle {};
# Packages required to drive installation of other packages
bootstrapPkgs =
runQemuCommand "bootstrap-win-pkgs.img" ''
mkdir -p pkgs/fod
cp ${bundleInstaller} pkgs/"$(stripHash "${bundleInstaller}")"
# Install optional windows features
cp ${openSshServerPackage} pkgs/fod/OpenSSH-Server-Package~31bf3856ad364e35~amd64~~.cab
# The SSH setup script is shipped on this drive because the Windows autounattend XML parser is too strict about quoting to embed it directly
cp ${autounattend.setupScript} pkgs/ssh-setup.ps1
virt-make-fs --partition --type=fat pkgs/ $out
'';
installScript = pkgs.writeScript "windows-install-script" (
let
qemuParams = utils.mkQemuFlags (lib.optional (!impureMode) "-display none" ++ [
# "CD" drive with bootstrap pkgs
"-drive"
"id=virtio-win,file=${bootstrapPkgs},if=none,format=raw,readonly=on"
"-device"
"usb-storage,drive=virtio-win"
# USB boot
"-drive"
"id=win-install,file=usbimage.img,if=none,format=raw,readonly=on"
"-device"
"usb-storage,drive=win-install"
# Output image
"-drive"
"file=c.img,index=0,media=disk,cache=unsafe"
# Network
"-netdev user,id=n1,net=192.168.1.0/24,restrict=on"
]);
in
''
#!${pkgs.runtimeShell}
set -euxo pipefail
export PATH=${lib.makeBinPath [ p7zip utils.qemu libguestfs ]}:$PATH
# Create a bootable "USB" image
# Booting in USB mode circumvents the "press any key to boot from cdrom" prompt
#
# Also embed the autounattend answer file in this image
mkdir -p win
mkdir -p win/nix-win
7z x -y ${windowsIso} -owin
cp ${autounattend.autounattendXML} win/autounattend.xml
virt-make-fs --partition --type=fat win/ usbimage.img
rm -rf win
# Qemu requires files to be rw
qemu-img create -f qcow2 c.img ${diskImageSize}
qemu-system-x86_64 ${lib.concatStringsSep " " qemuParams}
''
);
baseImage = pkgs.runCommandNoCC "windows.img" {} ''
${installScript}
mv c.img $out
'';
finalImage = builtins.foldl' (acc: v: pkgs.runCommandNoCC "${v.name}.img" {
buildInputs = with utils; [
qemu win-wait win-exec win-put
] ++ (v.buildInputs or []);
} (let
script = pkgs.writeScript "${v.name}-script" v.script;
qemuParams = utils.mkQemuFlags (lib.optional (!impureMode) "-display none" ++ [
# Output image
"-drive"
"file=c.img,index=0,media=disk,cache=unsafe"
# Network - enable SSH forwarding
"-netdev user,id=n1,net=192.168.1.0/24,restrict=on,hostfwd=tcp::2022-:22"
]);
in ''
# Create an image referencing the previous image in the chain
qemu-img create -f qcow2 -b ${acc} c.img
set -m
qemu-system-x86_64 ${lib.concatStringsSep " " qemuParams} &
win-wait
echo "Executing script to build layer..."
${script}
echo "Layer script done"
echo "Shutting down..."
win-exec 'shutdown /s'
echo "Waiting for VM to terminate..."
fg
echo "Done"
mv c.img $out
'')) baseImage installCommands;
in
# impureMode is meant for debugging the base image, not the full incremental build process
if !(impureMode) then finalImage else assert installCommands == []; installScript

View File

@ -0,0 +1,26 @@
# Preparation steps
## Install a Windows image
1. Adjust `build.nix` accordingly
2. Run:

If in impure mode:
```shell
nix-build build.nix
./result
```
This results in a file called `c.img`.

If in pure mode:
```shell
nix-build build.nix
ls -la ./result
```
This results in a symlink to the image in the Nix store.

# Impure/pure mode
Sometimes it can be useful to build the image _outside_ of the Nix sandbox for debugging purposes.
For this purpose we have an attribute called `impureMode` which outputs the shell script used by Nix inside the sandbox to build the image; a minimal sketch of both invocations is given below.
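A minimal sketch of both invocations, assuming the entry point is `build.nix` with the `impureMode` argument shown in this repository and that `--arg` is used instead of editing the file:

```shell
# Pure build: the image is built inside the sandbox and ./result is a
# symlink to it in the Nix store.
nix-build build.nix

# Impure/debug build: override impureMode on the command line to get the
# install script instead, then run it by hand; it leaves c.img in the
# current working directory.
nix-build build.nix --arg impureMode true
./result
```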

View File

@ -0,0 +1,318 @@
{ pkgs
, lib ? pkgs.lib
, fullName
, organization
, administratorPassword
, uiLanguage ? "en-US"
, inputLocale ? "en-US"
, userLocale ? "en-US"
, systemLocale ? "en-US"
, users ? {}
, productKey ? null
, defaultUser ? null
, setupCommands ? []
, timeZone ? "UTC"
, services ? {}
, impureShellCommands ? []
, driveLetter ? "E:"
, ...
}:
let
serviceCommands = lib.mapAttrsToList (
serviceName: attrs: "powershell Set-Service -Name ${serviceName} " + (
lib.concatStringsSep " " (
(
lib.mapAttrsToList (
n: v: if builtins.typeOf v != "bool" then "-${n} ${v}" else "-${n}"
)
) (
# Always run without interaction
{ Force = true; } // attrs
)
)
)
) services;
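# Illustration (values from the commented-out WinRm example in the build files):
#   services = { WinRm = { Status = "Running"; PassThru = true; }; }
# renders to the single command
#   powershell Set-Service -Name WinRm -Force -PassThru -Status Running
# i.e. Force is always injected and boolean attributes become bare flags.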
sshSetupCommands =
# let
# makeDirs = lib.mapAttrsToList (n: v: ''mkdir C:\Users\${n}\.ssh'') users;
# writeKeys = lib.flatten (lib.mapAttrsToList (n: v: builtins.map (key: let
# commands = [
# ''powershell.exe Set-Content -Path C:\Users\${n}\.ssh\authorized_keys -Value '${key}' ''
# ];
# in lib.concatStringsSep "\n" commands) (v.sshKeys or [])) users);
# mkDirsDesc = builtins.map (c: {Path = c; Description = "Make SSH key dir";}) makeDirs;
# writeKeysDesc = builtins.map (c: {Path = c; Description = "Add SSH key";}) writeKeys;
# in
# mkDirsDesc ++ writeKeysDesc ++
[
{
Path = ''powershell.exe Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 -Source ${driveLetter}\fod -LimitAccess'';
Description = "Add OpenSSH service.";
}
{
Path = ''powershell.exe Set-Service -Name sshd -StartupType Automatic'';
Description = "Enable SSH by default.";
}
];
assertCommand = c: builtins.typeOf c == "string" || builtins.typeOf c == "set" && builtins.hasAttr "Path" c && builtins.hasAttr "Description" c;
commands = builtins.map (x: assert assertCommand x; if builtins.typeOf x == "string" then { Path = x; Description = x; } else x) (
[
{
Path = "powershell.exe Set-ExecutionPolicy -Force Unrestricted";
Description = "Allow unsigned powershell scripts.";
}
]
++ [
{
Path = ''powershell.exe ${driveLetter}\win-bundle-installer.exe'';
Description = "Install any declared packages.";
}
]
++ setupCommands
++ [
{
Path = ''powershell.exe ${driveLetter}\ssh-setup.ps1'';
Description = "Setup SSH and keys";
}
]
++ serviceCommands
++ impureShellCommands
);
mkCommand = attrs: ''
<RunSynchronousCommand wcm:action="add">
${lib.concatStringsSep "\n" (lib.attrsets.mapAttrsToList (n: v: "<${n}>${v}</${n}>") attrs)}
</RunSynchronousCommand>
'';
mkCommands = commands: (
builtins.foldl' (
acc: v: rec {
i = acc.i + 1;
values = acc.values ++ [ (mkCommand (v // { Order = builtins.toString i; })) ];
}
) {
i = 0;
values = [];
} commands
).values;
mkUser =
{ name
, password
, description ? ""
, displayName ? ""
, groups ? []
# , sshKeys ? [] # Handled in scripts
}: ''
<LocalAccount wcm:action="add">
<Password>
<Value>${password}</Value>
<PlainText>true</PlainText>
</Password>
<Description>${description}</Description>
<DisplayName>${displayName}</DisplayName>
<Group>${builtins.concatStringsSep ";" (lib.unique ([ "Users" ] ++ groups))}</Group>
<Name>${name}</Name>
</LocalAccount>
'';
# Windows expects a flat list of users while we want to manage them as a set
flatUsers = builtins.attrValues (builtins.mapAttrs (name: s: s // { inherit name; }) users);
autounattendXML = pkgs.writeText "autounattend.xml" ''
<?xml version="1.0" encoding="utf-8"?>
<unattend xmlns="urn:schemas-microsoft-com:unattend">
<settings pass="windowsPE">
<component name="Microsoft-Windows-PnpCustomizationsWinPE" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<DriverPaths>
<PathAndCredentials wcm:action="add" wcm:keyValue="1">
<Path>D:\</Path>
</PathAndCredentials>
<PathAndCredentials wcm:action="add" wcm:keyValue="2">
<Path>E:\</Path>
</PathAndCredentials>
</DriverPaths>
</component>
<component name="Microsoft-Windows-Setup" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<DiskConfiguration>
<Disk wcm:action="add">
<CreatePartitions>
<CreatePartition wcm:action="add">
<Order>1</Order>
<Type>EFI</Type>
<Size>100</Size>
</CreatePartition>
<CreatePartition wcm:action="add">
<Order>2</Order>
<Type>MSR</Type>
<Size>16</Size>
</CreatePartition>
<CreatePartition wcm:action="add">
<Order>3</Order>
<Type>Primary</Type>
<Extend>true</Extend>
</CreatePartition>
</CreatePartitions>
<ModifyPartitions>
<ModifyPartition wcm:action="add">
<Order>1</Order>
<Format>FAT32</Format>
<Label>System</Label>
<PartitionID>1</PartitionID>
</ModifyPartition>
<ModifyPartition wcm:action="add">
<Order>2</Order>
<PartitionID>2</PartitionID>
</ModifyPartition>
<ModifyPartition wcm:action="add">
<Order>3</Order>
<Format>NTFS</Format>
<Label>Windows</Label>
<Letter>C</Letter>
<PartitionID>3</PartitionID>
</ModifyPartition>
</ModifyPartitions>
<DiskID>0</DiskID>
<WillWipeDisk>true</WillWipeDisk>
</Disk>
</DiskConfiguration>
<ImageInstall>
<OSImage>
<InstallTo>
<DiskID>0</DiskID>
<PartitionID>3</PartitionID>
</InstallTo>
<InstallFrom>
<MetaData wcm:action="add">
<Key>/IMAGE/INDEX</Key>
<Value>1</Value>
</MetaData>
</InstallFrom>
</OSImage>
</ImageInstall>
<UserData>
<ProductKey>
${if productKey != null then "<Key>${productKey}</Key>" else ""}
<WillShowUI>OnError</WillShowUI>
</ProductKey>
<AcceptEula>true</AcceptEula>
<FullName>${fullName}</FullName>
<Organization>${organization}</Organization>
</UserData>
</component>
<component name="Microsoft-Windows-International-Core-WinPE" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<SetupUILanguage>
<UILanguage>${uiLanguage}</UILanguage>
</SetupUILanguage>
<InputLocale>${inputLocale}</InputLocale>
<SystemLocale>${systemLocale}</SystemLocale>
<UILanguage>${uiLanguage}</UILanguage>
<UILanguageFallback>en-US</UILanguageFallback>
<UserLocale>${userLocale}</UserLocale>
</component>
</settings>
<settings pass="oobeSystem">
<component name="Microsoft-Windows-International-Core" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<InputLocale>${inputLocale}</InputLocale>
<SystemLocale>${systemLocale}</SystemLocale>
<UILanguage>${uiLanguage}</UILanguage>
<UILanguageFallback>en-US</UILanguageFallback>
<UserLocale>${userLocale}</UserLocale>
</component>
<component name="Microsoft-Windows-Shell-Setup" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<OOBE>
<HideEULAPage>true</HideEULAPage>
<HideLocalAccountScreen>true</HideLocalAccountScreen>
<HideOEMRegistrationScreen>true</HideOEMRegistrationScreen>
<HideOnlineAccountScreens>true</HideOnlineAccountScreens>
<HideWirelessSetupInOOBE>true</HideWirelessSetupInOOBE>
<ProtectYourPC>1</ProtectYourPC>
</OOBE>
<TimeZone>${timeZone}</TimeZone>
<UserAccounts>
${if administratorPassword != null then ''
<AdministratorPassword>
<Value>${administratorPassword}</Value>
<PlainText>true</PlainText>
</AdministratorPassword>
'' else ""}
<LocalAccounts>
${builtins.concatStringsSep "\n" (builtins.map mkUser flatUsers)}
</LocalAccounts>
</UserAccounts>
${if defaultUser == null then "" else ''
<AutoLogon>
<Password>
<Value>${(builtins.getAttr defaultUser users).password}</Value>
<PlainText>true</PlainText>
</Password>
<Enabled>true</Enabled>
<Username>${defaultUser}</Username>
</AutoLogon>
''}
<FirstLogonCommands>
<SynchronousCommand wcm:action="add">
<Order>1</Order>
<CommandLine>cmd /C shutdown /s /f /t 00</CommandLine>
<Description>ChangeHideFiles</Description>
</SynchronousCommand>
</FirstLogonCommands>
</component>
</settings>
<settings pass="specialize">
<component name="Microsoft-Windows-Deployment" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<RunSynchronous>
${lib.concatStringsSep "\n" (mkCommands commands)}
</RunSynchronous>
</component>
<component name="Microsoft-Windows-SQMApi" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="NonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<CEIPEnabled>0</CEIPEnabled>
</component>
</settings>
<!-- Disable Windows UAC -->
<settings pass="offlineServicing">
<component name="Microsoft-Windows-LUA-Settings" processorArchitecture="amd64" publicKeyToken="31bf3856ad364e35" language="neutral" versionScope="nonSxS" xmlns:wcm="http://schemas.microsoft.com/WMIConfig/2002/State" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<EnableLUA>false</EnableLUA>
</component>
</settings>
<cpi:offlineImage cpi:source="wim:c:/wim/windows-10/install.wim#Windows 10 Enterprise LTSC 2019 Evaluation" xmlns:cpi="urn:schemas-microsoft-com:cpi" />
</unattend>
'';
in {
# Lint and format as a sanity check
autounattendXML = pkgs.runCommandNoCC "autounattend.xml" {} ''
${pkgs.libxml2}/bin/xmllint --format ${autounattendXML} > $out
'';
# autounattend.xml is _super_ picky about quotes and other things
setupScript = pkgs.writeText "ssh-setup.ps1" (
''
# Setup SSH and keys
'' +
lib.concatStrings (
builtins.map (c: ''
# ${c.Description}
${c.Path}
'') sshSetupCommands
)
);
}

View File

@ -0,0 +1,100 @@
{
pkgs ? import <nixpkgs> {}
, impureMode ? false
}:
let
win = (import ./default.nix { inherit pkgs; });
in
win.makeWindowsImage {
# Custom base iso
# windowsImage = pkgs.fetchurl {
# url = "https://software-download.microsoft.com/download/sg/17763.107.101029-1455.rs5_release_svc_refresh_CLIENT_LTSC_EVAL_x64FRE_en-us.iso";
# sha256 = "668fe1af70c2f7416328aee3a0bb066b12dc6bbd2576f40f812b95741e18bc3a";
# };
# User accounts
users = {
artiq = {
password = "1234";
# description = "Default user";
# displayName = "Display name";
groups = [
"Administrators"
];
};
};
# Build install script & skip building iso
inherit impureMode;
# impureShellCommands = [
# "powershell.exe echo Hello"
# ];
fullName = "M-Labs";
organization = "m-labs";
administratorPassword = "12345";
# Auto login
defaultUser = "artiq";
# Imperative installation commands, to be installed incrementally
installCommands = [
{
name = "Anaconda3";
script = let
Anaconda3 = pkgs.fetchurl {
name = "Anaconda3.exe";
url = "https://repo.anaconda.com/archive/Anaconda3-2019.03-Windows-x86_64.exe";
sha256 = "1f9icm5rwab6l1f23a70dw0qixzrl62wbglimip82h4zhxlh3jfj";
};
in ''
cp ${Anaconda3} ./Anaconda3.exe
win put Anaconda3.exe 'C:\Users\artiq'
win exec 'start /wait "" .\Anaconda3.exe /S /D=%UserProfile%\Anaconda3'
'';
}
];
# services = {
# # Enable remote management
# WinRm = {
# Status = "Running";
# PassThru = true;
# };
# };
# License key
# productKey = "iboughtthisone";
# Locales
# uiLanguage = "en-US";
# inputLocale = "en-US";
# userLocale = "en-US";
# systemLocale = "en-US";
# packages = [
# (
# win.pkgs.makeMSIPkg {
# Note: file not in the repository; it is meant as an example to substitute
# name = "notepadplusplus";
# msi = ./Notepad++7.7.msi;
# # Custom cert
# # cert = ./notepad++-cert.cer
# }
# )
# (
# win.pkgs.makeCrossPkg {
# name = "hello";
# pkg = pkgs.pkgsCross.mingwW64.hello;
# }
# )
# ];
}

View File

@ -0,0 +1 @@
use nix

View File

@ -0,0 +1,9 @@
{ pkgs ? import <nixpkgs> {}
, lib ? pkgs.lib
}:
pkgs.runCommandNoCC "win-bundle-installer.exe" {} ''
cp ${./main.go} main.go
env HOME=$(mktemp -d) GOOS=windows GOARCH=amd64 ${pkgs.go}/bin/go build
mv build.exe $out
''

View File

@ -0,0 +1,116 @@
package main
import (
"archive/tar"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
)
func Untar(dst string, r io.Reader) error {
tr := tar.NewReader(r)
for {
header, err := tr.Next()
switch {
case err == io.EOF:
return nil
case err != nil:
return err
case header == nil:
continue
}
target := filepath.Join(dst, header.Name)
switch header.Typeflag {
case tar.TypeDir:
if _, err := os.Stat(target); err != nil {
if err := os.MkdirAll(target, 0755); err != nil {
return err
}
}
case tar.TypeReg:
f, err := os.OpenFile(target, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode))
if err != nil {
return err
}
if _, err := io.Copy(f, tr); err != nil {
return err
}
f.Close()
}
}
}
func InstallBundle(bundlePath string) error {
reader, err := os.Open(bundlePath)
if err != nil {
log.Fatal(err)
}
workDir, err := ioutil.TempDir("", "bundle_install")
if err != nil {
return err
}
defer os.RemoveAll(workDir)
err = Untar(workDir, reader)
if err != nil {
return err
}
installScript := filepath.Join(workDir, "install.ps1")
cmd := exec.Command("powershell", installScript)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Dir = workDir
err = cmd.Run()
return err
}
func main() {
// Get path relative to binary
baseDir, err := filepath.Abs(filepath.Dir(os.Args[0]))
if err != nil {
log.Fatal(err)
}
var dirs = [2]string{"bootstrap", "user"}
for _, pkgDir := range dirs {
dir := filepath.Join(baseDir, pkgDir)
files, err := ioutil.ReadDir(dir)
if err != nil {
log.Fatal(err)
}
for _, file := range files {
bundle := filepath.Join(dir, file.Name())
fmt.Printf("Installing: %s\n", bundle)
err := InstallBundle(bundle)
if err != nil {
log.Fatal(err)
}
}
}
}

View File

@ -0,0 +1,13 @@
{ pkgs ? import <nixpkgs> {} }:
pkgs.mkShell {
buildInputs = [
pkgs.go
];
shellHook = ''
unset GOPATH
'';
}

View File

@ -1,17 +0,0 @@
{ pkgs } : [
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/noarch/prettytable-0.7.2-py_3.tar.bz2";
sha256 = "0b7s4xm6bbkcg37sf1i3mxrbac0vxhryq22m3qx4x9kh6k2c5g5q";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/pycparser-2.20-py_0.tar.bz2";
sha256 = "1qwcb07q8cjz0qpj6pfxb0qb68kddmx9bv9wr5pghwz78q8073z9";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/six-1.15.0-py_0.tar.bz2";
sha256 = "08rsfp9bd2mz8r120s8w5vgncy0gn732xa0lfgbmx833548cfqmb";
})
]

View File

@ -1,22 +0,0 @@
{ pkgs } : [
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/noarch/prettytable-2.1.0-pyhd8ed1ab_0.tar.bz2";
sha256 = "1w71padwzy6ay5g8zl575ali994cssgcgzf5917rap3fmw2mgg4d";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/six-1.16.0-pyhd3eb1b0_0.tar.bz2";
sha256 = "120wav3bxbyv0jsvbl94rxsigqqchsqg4qqxccg9ij7ydirmqaql";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/pyqtgraph-0.11.0-py_0.tar.bz2";
sha256 = "1jnid69dpvhd8nscmkm761qpqz8ip0gka5av90xs3i0pqkqmffqg";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/cached-property-1.5.2-py_0.tar.bz2";
sha256 = "01mcbrsrdwvinyvp0fs2hbkczydb33gbz59ldhb1484w5mm9y9bi";
})
]

View File

@ -1,52 +0,0 @@
{ pkgs } : [
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/cached-property-1.5.2-py_0.tar.bz2";
sha256 = "01mcbrsrdwvinyvp0fs2hbkczydb33gbz59ldhb1484w5mm9y9bi";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/zipp-3.6.0-pyhd3eb1b0_0.tar.bz2";
sha256 = "1wj0hmhn09b4szs5zyslpd1mggy90pbjil6q3lyqkw3z492za80q";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/noarch/prettytable-2.4.0-pyhd8ed1ab_0.tar.bz2";
sha256 = "1iv2x8m8xf2y8v68kz2lil2zaji7gsz099zs8wsrap03j5vpraf0";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/wheel-0.37.0-pyhd3eb1b0_1.tar.bz2";
sha256 = "10bxbfy7dlmbr8b21ddb1k2wkrzhs7j2zgmss38pv4g5xidv9v74";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/pyqtgraph-0.11.0-py_0.tar.bz2";
sha256 = "1jnid69dpvhd8nscmkm761qpqz8ip0gka5av90xs3i0pqkqmffqg";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/six-1.16.0-pyhd3eb1b0_0.tar.bz2";
sha256 = "120wav3bxbyv0jsvbl94rxsigqqchsqg4qqxccg9ij7ydirmqaql";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/python-dateutil-2.8.2-pyhd3eb1b0_0.tar.bz2";
sha256 = "1brzm9v9yvs3xhdh89jzw5xjq4a3r9vizhkhdfcax86d2q52ji97";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/pycparser-2.21-pyhd3eb1b0_0.tar.bz2";
sha256 = "1dyi89xx73kq0caz4jx493czn16w0dl1gjhw0c5kw28bxz6i8wm8";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/wcwidth-0.2.5-pyhd3eb1b0_0.tar.bz2";
sha256 = "1x3sncbrp7bml6qjss24qyy0rsjbdhnzjwpf6apcd14kzspnr21a";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/noarch/tzdata-2021e-hda174b7_0.tar.bz2";
sha256 = "1sxgc0pamsskszm29cxpwzlffydxjr3aqpgly7j1f3ansvchxvb4";
})
]

View File

@ -1,167 +0,0 @@
{ pkgs } : [
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/pyqt-5.9.2-py35h6538335_2.tar.bz2";
sha256 = "1anwq53nic50cijngxaylpn6232j9wdc2wz2rykqpgzvs1ms108s";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/zlib-1.2.11-h62dcd97_4.tar.bz2";
sha256 = "1jxd7sg0c278hqv09q6hridpdnyhkd34gbs92wkravj3gwsr1adk";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/openssl-1.1.1g-he774522_0.tar.bz2";
sha256 = "1gwfj33qb8inikdhmgcm30iz1ag8x71lzicsxbdr7ni4153df5gb";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/mkl_random-1.0.1-py35h77b88f5_1.tar.bz2";
sha256 = "0899qg9ih8srpw9q5mxd85hg4gpawb7lmz25x9xi401cfwhgwq7l";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/hdf5-1.10.2-hac2f561_1.tar.bz2";
sha256 = "0l1i5bpxl0bn9hf738ywygjwc32d4cq5fjkgzij3x27cxi8nvd21";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/tbb4py-2018.0.5-py35he980bc4_0.tar.bz2";
sha256 = "0gypjcmciw3rnd2cq0sqmvspgzaas13fghv633dqj8g3bvl4lfif";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/wheel-0.31.1-py35_0.tar.bz2";
sha256 = "07zmbg57lpvqd9nmahiff1mhzxmzchx1v128dcrj4iamymjcdlzf";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/vs2015_runtime-14.16.27012-hf0eaf9b_2.tar.bz2";
sha256 = "1gbm6i6nkp8linmak5mm42hj1nzqd5ppak8kv1n3wfn52p21ngvs";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/pygit2-0.27.1-py35hfa6e2cd_0.tar.bz2";
sha256 = "1jypm8vxs4j4yr37ai4ki9qsslv3wz6slklmisnvjraz1a4vzaf8";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/pip-10.0.1-py35_0.tar.bz2";
sha256 = "0zwlkfgnag1s64wbwdcg44zqj2dpfcq1g4b6dsk82q24j7fw40i3";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/h5py-2.8.0-py35h3bdd7fb_2.tar.bz2";
sha256 = "0743wrf51b2vs8ybasjpn7ricbh740r7drxcimkhbxp8r6vd66vp";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/numpy-1.15.2-py35ha559c80_0.tar.bz2";
sha256 = "1igf4gm726s4kg7km24flxdxr73fafaz3z18y3ndcb5f6r9zwa44";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/mkl-2018.0.3-1.tar.bz2";
sha256 = "01pq0f2787q58avg5ylfrbpf4jlg2b6rbajvf3swjpm1cmzxkm81";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/libssh2-1.9.0-h7a1dbc1_1.tar.bz2";
sha256 = "0sz405ab3n7991hxy8l9affs1slivsimgadxsdr6wvpgx3j4aqgx";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/tk-8.6.10-he774522_0.tar.bz2";
sha256 = "15bfncacdxmcbn4xixmfz2m7a09k7hcpwxvwn1lki84bx0fmzsis";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/sqlite-3.32.2-h2a8f88b_0.tar.bz2";
sha256 = "086jjnxfchypkr8cp1q8nsis0jfvl293bv4bcg7ikv7aia3vda8p";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/wincertstore-0.2-py35hfebbdb8_0.tar.bz2";
sha256 = "1624bzqlbiq4jlz46l65574smw739p7l38ydzxmayq3jmwx6zaar";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/certifi-2018.8.24-py35_1.tar.bz2";
sha256 = "06ygpkx3f71rwmq9lgc29r6jj4g1zi8rvrmn4mrrsb9b5sf0rzgf";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/icu-58.2-ha925a31_3.tar.bz2";
sha256 = "0vcl0j3v9ab022s2g3a9iv1pn7sflh670yyf5m08hdyf049m3jz7";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/regex-2018.08.29-py35hfa6e2cd_0.tar.bz2";
sha256 = "16cjgwpaqnfy8bg35iz12bb9whpws4abiz4cq4shpnh6gnwpwzj3";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/cffi-1.11.5-py35h74b6da3_1.tar.bz2";
sha256 = "0nc3ps4xl8rqpwiqaqqr5lrqzk8lx1a4hh3fpm7i7rlppqn5pm6a";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/libcurl-7.69.1-h2a8f88b_0.tar.bz2";
sha256 = "182zrmgl2142gfgnbgjsk0gbj85vw75xzxvhn9lzm485ghxsasvg";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/mkl_fft-1.0.6-py35hdbbee80_0.tar.bz2";
sha256 = "0xb02rx28rjlp5clavg19jb129ihcarrkz46pncaar91qxckbbxz";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/pyqtgraph-0.10.0-py35h28b3542_3.tar.bz2";
sha256 = "0ywzw3i4kf5ccck9whmg1j7s22x6i5fpywndy8rkr9va38g4nazq";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/sip-4.19.8-py35h6538335_0.tar.bz2";
sha256 = "1y761zpm6bi0pb61x1y26ap56hylh6mjg1xq9zl08bmcl2dlkwfy";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/setuptools-40.2.0-py35_0.tar.bz2";
sha256 = "146piyifnip1flqph19nxilnhfbzzsxlhfyx0i61wv7krr6ln9yy";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/python-levenshtein-0.12.0-py35_0.tar.bz2";
sha256 = "1rdpzv1y535swf17nm88chkp1m8w3wd3nwdy7jk1xfcnx1da5ss6";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/libgit2-0.27.8-hfac1375_0.tar.bz2";
sha256 = "05pjzwmm5vyxwcgi6vv3i961x3dgdd9c4b1ihagq9kyxgqfy21kd";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/scipy-1.1.0-py35h4f6bf74_1.tar.bz2";
sha256 = "0sdyj5nlycv4krz7f8rzhi0kxv302gpx65x1zwhj8dn6b2c50li0";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/curl-7.69.1-h2a8f88b_0.tar.bz2";
sha256 = "0hw5dh7gzx8fap4c3vkc2xc2q9by3f5ndbigr6pm6w5v29qaydn4";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/intel-openmp-2019.4-245.tar.bz2";
sha256 = "13qid5aagyxsfy5ng4bbwb7hs9jj29jvqbpvvkjiy6bgv36m8kr8";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/numpy-base-1.15.2-py35h8128ebf_0.tar.bz2";
sha256 = "166w4wkp9dwl505hc00hny1mq2mlvb169n7c9nws7dz8j36pqfrx";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/python-3.5.6-he025d50_0.tar.bz2";
sha256 = "0ygvmbyvhc2jisb5bzb0r5709qmn8392gr7rv6c8vrdiylfiqr73";
})
]

View File

@ -1,67 +0,0 @@
{ pkgs } : [
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/numpy-base-1.20.2-py38hc2deb75_0.tar.bz2";
sha256 = "1940fryxlil04d5y0df1s1sydhny8l97slvfm7v40mxibdmf2sdn";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/hdf5-1.10.6-h7ebc959_0.tar.bz2";
sha256 = "09bik65gspyrqj3j5p67wf2ywhgyfz3pkw39gwdzha7yyjkkzx0q";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/certifi-2021.5.30-py38haa95532_0.tar.bz2";
sha256 = "0hzj23xjw88wllz1l4qdnzp335608vm8pl7w1ka9pkg6ip69lnpm";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/sqlite-3.36.0-h2bbff1b_0.tar.bz2";
sha256 = "15w0lhcl97wafqvc6ccc96311wc5rrmh16i4ki1pw6kzkfmr1k6r";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/libgit2-1.1.1-h8648793_0.tar.bz2";
sha256 = "0rwmd48g7sywmxgcyjad3hznpm15d3w5604syrkcs2ryih0cgwkd";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/h5py-3.2.1-py38h3de5c98_0.tar.bz2";
sha256 = "17czl7gvv6d9v6ng9l6c0i1iy181cr1qq7cyn2s90kamnfh3dx8b";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/numpy-1.20.2-py38ha4e8547_0.tar.bz2";
sha256 = "0w4hkirwgh4bp7djzxp2yh086jfdbz1y3njsrpqm441rjjq39hpz";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/python_abi-3.8-2_cp38.tar.bz2";
sha256 = "1j62rls5r6646b7gagc7d6jj6sqiyqd9vq442dqg3pwyldlz3zqg";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/python-levenshtein-0.12.2-py38h2bbff1b_0.tar.bz2";
sha256 = "0xvl3v7q1wr0a8li3f8d7hara58lcvrr6hmcd6cqkvcnhggkhp9s";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/pygit2-1.6.1-py38h294d835_0.tar.bz2";
sha256 = "0frgv2b1ckgp8w574abliz369lbziyqsipkwxcjy4l9rbn2pa0rj";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/ca-certificates-2021.5.25-haa95532_1.tar.bz2";
sha256 = "0g77ic1hs9gj9nknjgrn7byk63z82ima7gqynjcds1kbk8cy4hcd";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/pip-21.1.3-py38haa95532_0.tar.bz2";
sha256 = "1wzb3f3n1lndmxbr3agmx4rr1k11jf60fcrqmgr1d7a3ygqvqy4q";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/python-3.8.10-hdbf39b2_7.tar.bz2";
sha256 = "05p2g1552crfmcf4a9wfjg4d1qngsvi1srpca2hqr6s9slip2w19";
})
]

View File

@ -1,182 +0,0 @@
{ pkgs } : [
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/ca-certificates-2021.10.26-haa95532_2.tar.bz2";
sha256 = "14zdv6whd7cw298mkwpgkfydpz6zwhjq6gvxpw5s77m9b8jyi08w";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/jpeg-9d-h2bbff1b_0.tar.bz2";
sha256 = "0jdwx9bl89byaqi73h0wr9hkjdi0ia47izgj602xfzc8ylhg0fxl";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/python_abi-3.9-2_cp39.tar.bz2";
sha256 = "04d9pbqzck0330jv7mi8x4r1883sv421lwai2p9yamr2yl6xpan0";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/certifi-2021.10.8-py39haa95532_0.tar.bz2";
sha256 = "032gmb2lyd0kwb7cr1j90fqyb44aar9jnki9jzjp6p65lbckc27l";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/python-3.9.7-h6244533_1.tar.bz2";
sha256 = "1pgnrci071wbjdsarjrjssqvbk6nr2hbsvwdvzvf255f33264jj3";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/pip-21.2.4-py39haa95532_0.tar.bz2";
sha256 = "06a916f8fyjydy5rrrhvwjxn5jwr1w2b6dvhgpsa09bq4p1c2qid";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/regex-2021.8.3-py39h2bbff1b_0.tar.bz2";
sha256 = "1p8kf8d2d3cb6z91fl2sjrn3mpaxb7pq1rc20705k1lqbjf9biga";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/mkl-2021.4.0-haa95532_640.tar.bz2";
sha256 = "115zmm4n769xl442qmv5h2ik5fyc1hb8cm1hc5a1vyb7bqwj2xrx";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/setuptools-58.0.4-py39haa95532_0.tar.bz2";
sha256 = "0avlsc1k3ms92panb24z5vas5gkfm9zxy0mh71pv7yqc466k2qc0";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/importlib-metadata-4.8.2-py39haa95532_0.tar.bz2";
sha256 = "1fjr69ibknprp25742vzpbck6byrw84w2g3zxa5vqsyz7pyv27xb";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/numba/win-64/llvmlite-0.38.0rc1-py39_0.tar.bz2";
sha256 = "0xmd338r5vl9f456cvmmf4s1bz1jxc3yv58050xqgifpkrx3z3vr";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/hdf5-1.10.6-h7ebc959_0.tar.bz2";
sha256 = "09bik65gspyrqj3j5p67wf2ywhgyfz3pkw39gwdzha7yyjkkzx0q";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/intel-openmp-2021.4.0-haa95532_3556.tar.bz2";
sha256 = "19mmjvcm7f8nzyk7djjsd9lf6qx01f35wrvmragv1fhzsis3ddgf";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/mkl_random-1.2.2-py39hf11a4ad_0.tar.bz2";
sha256 = "03h3857xvp5klhc24jn7lh0h7227qrk7q7ch768w27bx27g27js3";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/numpy-base-1.21.2-py39h0829f74_0.tar.bz2";
sha256 = "1x97k4kdvdwh0aahrpzfaypsf7zvxkqwn0xw7cph2p9fzk7j88zw";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/pygit2-1.7.2-py39hb82d6ee_0.tar.bz2";
sha256 = "1l56b7x1mrha9fs4j2zw4f2c199ab0yw1yggdivjyn40xll9x2sd";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/openssl-1.1.1l-h2bbff1b_0.tar.bz2";
sha256 = "1w37wciivy4dqa1gvkwq23myv7sk7vr4davarvxc9hjl2is3r4dm";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/sip-4.19.13-py39hd77b12b_0.tar.bz2";
sha256 = "1p2wgpzy1ccya314m53qjqnw2wns7wj4lxv3lkmddx940f12i76r";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/libssh2-1.10.0-h680486a_2.tar.bz2";
sha256 = "1nnzz9hxgj63gs48flj6pxq6nqz9gkx54gwibyq502za1rbhbyw6";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/python-levenshtein-0.12.2-py39h2bbff1b_0.tar.bz2";
sha256 = "1y5g9l07whv614qyafkzbph30zi9kypxfswdxh0gknsih8941j0k";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/pyreadline-2.1-py39haa95532_1.tar.bz2";
sha256 = "0581i2vp5b1dx7z9v4q41dd9ppj9bh9xgy0cnd7zd01zblafdbj5";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/h5py-3.6.0-py39h3de5c98_0.tar.bz2";
sha256 = "0g6jw2c6qn4vzmn6k3yal8qxrdzcdzpx6grwq1mjxpamb11akni7";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/numba/win-64/icc_rt-2020.2-intel_254.tar.bz2";
sha256 = "195km05did3n7zaljg7vwz11n8ibf935gk0m7dy07ngfk97s8w9f";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/lld-12.0.0-he71bc95_0.tar.bz2";
sha256 = "0f6aw6d72w339jyqyjavnnsv35k2dc4pl79ymylczwf2my37rqpk";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/mkl-service-2.4.0-py39h2bbff1b_0.tar.bz2";
sha256 = "05sbxqipw9cp2c4p5imadk4hc9ds1d25hys5vamjp6pnl6435ipk";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/pyqt-5.9.2-py39hd77b12b_6.tar.bz2";
sha256 = "1v3qbmidh2h4vkzf5w8lisjc58z2714bbvaf0dg5m2k5vga4zh84";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/llvm-tools-12.0.0-h05d9aec_3.tar.bz2";
sha256 = "0ly686hb730mvhw47xyz92dqchq46wkdaw1v2a6i0fzzdgf3ww9p";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/libxml2-2.9.12-h0ad7f3c_0.tar.bz2";
sha256 = "0gm1ajhiwdk23cc62nkayqqwqcccxwzj01dmypycx5d6cbh46l9y";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/wincertstore-0.2-py39haa95532_2.tar.bz2";
sha256 = "0fvcgpmn13i9rxql4dvw8ybzj2vvck0fqsqk1qrgdl3zkh841f1z";
})
(pkgs.fetchurl {
url = "https://conda.anaconda.org/conda-forge/win-64/libgit2-1.3.0-h8648793_1.tar.bz2";
sha256 = "0gw71dk8a2b80822p44rarq5nqiww8g9braxvbxnwanam5wl5611";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/cffi-1.15.0-py39h2bbff1b_0.tar.bz2";
sha256 = "1akbnswb1zigf1cx8im6s96rjmfryl4i20sg3xgaqy5papz926hf";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/mkl_fft-1.3.1-py39h277e83a_0.tar.bz2";
sha256 = "0i2v4vzwklvwp1n003yyr2hgpb1gjsh05ibsc92w0c5angf0s4wp";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/numpy-1.21.2-py39hfca59bb_0.tar.bz2";
sha256 = "1f10ka0vyx319z3f51zfxfcbwipa3vscw175c6i2qsb9ynlihqi7";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/scipy-1.7.1-py39hbe87c03_2.tar.bz2";
sha256 = "131xc7qr5lrfnxdy8p09y6gghx0h7wbaif2kq4r1jmmmm8pksjm6";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/libllvm12-12.0.0-h425c57c_3.tar.bz2";
sha256 = "1k5396hvb4hbylcpnx4m7v7kggw9kn8j1w0v91lwzc3gv64q83l9";
})
(pkgs.fetchurl {
url = "https://repo.anaconda.com/pkgs/main/win-64/sqlite-3.36.0-h2bbff1b_0.tar.bz2";
sha256 = "15w0lhcl97wafqvc6ccc96311wc5rrmh16i4ki1pw6kzkfmr1k6r";
})
]
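
These generated lists are plain Nix functions from pkgs to a list of fetchurl derivations; this one appears to be one of the conda_win-64_packages-*.nix files that run-test.nix imports further down. A minimal sketch of consuming such a list on its own, assuming only stock nixpkgs (the exact file name is taken from run-test.nix and may differ):

# Sketch only (not part of this diff): gather the fetched conda tarballs into
# a single output, e.g. to inspect them or stage them for a local channel.
let
  pkgs = import <nixpkgs> {};
  condaPkgs = import ./conda_win-64_packages-7.nix { inherit pkgs; };
in
  pkgs.linkFarmFromDrvs "conda-win-64-tarballs" condaPkgs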

View File

@ -0,0 +1,7 @@
{ pkgs ? import <nixpkgs> {}
}:
{
makeWindowsImage = attrs: import ./win.nix ({ inherit pkgs; } // attrs);
pkgs = import ./pkgs.nix { inherit pkgs; };
}
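
For orientation, a hypothetical caller of this entry point (assuming this file is the directory's default.nix). The attribute names users and installCommands come from win.nix later in this diff; the password and the layer itself are invented for illustration:

# Sketch only (not part of this diff).
let
  pkgs = import <nixpkgs> {};
  windows = import ./default.nix { inherit pkgs; };
in
  windows.makeWindowsImage {
    # win.nix's paramiko client reads users.artiq.password, so supply it here
    users.artiq = { password = "example-password"; };
    # each entry becomes one incremental qemu layer in win.nix's finalImage fold
    installCommands = [
      {
        name = "smoke-test";
        script = ''
          win exec 'echo hello from the guest'
        '';
      }
    ];
  }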

View File

@ -0,0 +1,97 @@
{ pkgs ? import <nixpkgs> {}
, diskImageSize ? "22G"
, qemuMem ? "4G"
,
}:
with pkgs;
let
windowsIso = fetchurl {
url = "https://software-download.microsoft.com/download/sg/17763.107.101029-1455.rs5_release_svc_refresh_CLIENT_LTSC_EVAL_x64FRE_en-us.iso";
sha256 = "668fe1af70c2f7416328aee3a0bb066b12dc6bbd2576f40f812b95741e18bc3a";
};
anaconda = fetchurl {
url = "https://repo.anaconda.com/archive/Anaconda3-2019.03-Windows-x86_64.exe";
sha256 = "1f9icm5rwab6l1f23a70dw0qixzrl62wbglimip82h4zhxlh3jfj";
};
escape = builtins.replaceStrings [ "\\" ] [ "\\\\" ];
qemu = import ./qemu.nix {
inherit pkgs qemuMem;
diskImage = "c.img";
};
# Double-escape because we produce a script from a shell heredoc
ssh = cmd: qemu.ssh (escape cmd);
scp = qemu.scp;
sshCondaEnv = cmd: ssh "anaconda\\scripts\\activate && ${cmd}";
condaEnv = "artiq-env";
condaDepSpecs =
builtins.concatStringsSep " "
(map (s: "\"${s}\"")
(import ../conda/artiq-deps.nix));
instructions =
builtins.toFile "install.txt"
(builtins.readFile ./install.txt);
in
stdenv.mkDerivation {
name = "windows-installer";
src = windowsIso;
setSourceRoot = "sourceRoot=`pwd`";
unpackCmd = ''
ln -s $curSrc windows.iso
'';
propagatedBuildInputs = qemu.inputs;
dontBuild = true;
installPhase = ''
mkdir -p $out/bin $out/data
ln -s $(readlink windows.iso) $out/data/windows.iso
cat > $out/bin/windows-installer.sh << EOF
#!/usr/bin/env bash
set -e -m
${qemu.qemu-img} create -f qcow2 c.img ${diskImageSize}
${qemu.runQemu false [] [
"-boot"
"order=d"
"-drive"
"file=c.img,index=0,media=disk,cache=unsafe"
"-drive"
"file=$out/data/windows.iso,index=1,media=cdrom,cache=unsafe"
]} &
cat ${instructions}
wait
EOF
cat > $out/bin/anaconda-installer.sh << EOF
#!/usr/bin/env bash
set -e -m
${qemu.runQemu false [] [
"-boot"
"order=c"
"-drive"
"file=c.img,index=0,media=disk"
]} &
sleep 10
${ssh "ver"}
${scp anaconda "Anaconda.exe"}
${ssh "start /wait \"\" Anaconda.exe /S /D=%cd%\\anaconda"}
${sshCondaEnv "conda config --add channels conda-forge"}
${sshCondaEnv "conda config --add channels m-labs"}
( ${sshCondaEnv "conda update -y conda"} ) || true
${sshCondaEnv "conda update -y --all"}
${sshCondaEnv "conda create -y -n ${condaEnv}"}
${sshCondaEnv "conda install -y -n ${condaEnv} ${condaDepSpecs}"}
${ssh "shutdown /p /f"}
echo "Waiting for qemu exit"
wait
EOF
chmod a+x $out/bin/*.sh
'';
}

View File

@ -1,79 +0,0 @@
#!/usr/bin/env bash
# Run manually to build the list of conda dependencies to install in the
# test environments.
# NOTE: This procedure encounters failing HTTPS handshakes. To work around them
# just for testing, insert `call conda config --set ssl_verify no` into
# getcondapackages.bat. To update the source lists securely, set the RTC
# hack to the proper date instead.
set -e
nix-build -E "
let
pkgs = import <nixpkgs> {};
wfvm = import ../wfvm.nix { inherit pkgs; };
in
wfvm.utils.wfvm-run {
name = \"get-conda-packages\";
image = wfvm.makeWindowsImage { installCommands = [ wfvm.layers.anaconda3 ]; };
# TODO: fix wfvm login expiry and also remove 'date' workarounds below
#fakeRtc = false;
isolateNetwork = false;
script = ''
cat > getcondapackages.bat << EOF
date 12-14-21
call conda config --prepend channels https://conda.m-labs.hk/artiq-beta
call conda config --append channels conda-forge
call conda config --prepend channels numba
call conda create -n artiq -y
call conda install --dry-run --json -n artiq artiq > packages.json
EOF
\${wfvm.utils.win-put}/bin/win-put getcondapackages.bat
\${wfvm.utils.win-exec}/bin/win-exec '.\Anaconda3\Scripts\activate && getcondapackages'
\${wfvm.utils.win-get}/bin/win-get packages.json
'';
}
"
./result/bin/wfvm-run-get-conda-packages
python -c "
import json
with open('packages.json') as json_file:
packages = json.load(json_file)
with open('packages_noarch.txt', 'w') as list_noarch:
with open('packages_win-64.txt', 'w') as list_win64:
for fetch in packages['actions']['FETCH']:
if 'm-labs' not in fetch['channel']:
if fetch['subdir'] == 'noarch':
list = list_noarch
elif fetch['subdir'] == 'win-64':
list = list_win64
else:
raise ValueError
url = fetch['url']
if url.endswith('.conda'):
url = url[:-6] + '.tar.bz2'
print(url, file=list)
"
for type in "noarch" "win-64"; do
echo Downloading $type packages
out=conda_$type\_packages.nix
echo "{ pkgs } : [" > $out
while read package; do
hash=$(nix-prefetch-url $package)
echo "
(pkgs.fetchurl {
url = \"$package\";
sha256 = \"$hash\";
})" >> $out
done < packages_$type.txt
echo "]" >> $out
done
rm result getcondapackages.bat packages.json packages_noarch.txt packages_win-64.txt

View File

@ -1,10 +1,32 @@
{ pkgs ? import <nixpkgs> {} }:
# This runs `run-test.nix` with `nix-build`
{ pkgs ? import <nixpkgs> {}
, artiqpkgs ? import ../. { inherit pkgs; }
, diskImage ? (import ./build.nix { inherit pkgs; })
, qemuMem ? "2G"
, testTimeout ? 180
}:
with pkgs;
let
artiqpkgs = import ../. { inherit pkgs; };
run-test = import ./run-test.nix {
inherit pkgs artiqpkgs;
testCommand = "set ARTIQ_ROOT=%cd%\\Anaconda3\\envs\\artiq-env\\Lib\\site-packages\\artiq\\examples\\kc705_nist_clock&& python -m unittest discover -v sipyco.test && python -m unittest discover -v artiq.test";
};
windowsRunner = overrides:
import ./run-test.nix (
{
inherit pkgs diskImage qemuMem testTimeout;
sipycoPkg = artiqpkgs.conda-sipyco;
artiqPkg = artiqpkgs.conda-artiq;
} // overrides
);
in
run-test
stdenv.mkDerivation {
name = "windows-test";
phases = [ "installPhase" "checkPhase" ];
installPhase = "touch $out";
doCheck = true;
checkPhase = ''
${windowsRunner { testCommand = "set ARTIQ_ROOT=%cd%\\anaconda\\envs\\artiq-env\\Lib\\site-packages\\artiq\\examples\\kc705_nist_clock&&python -m unittest discover -v artiq.test"; }}/bin/run.sh
'';
}

View File

@ -0,0 +1 @@
This file is not publicly accessible anywhere, so it had to be extracted from a connected instance

Binary file not shown.

artiq-fast/windows/pkgs.nix Normal file (110 lines)
View File

@ -0,0 +1,110 @@
{ pkgs ? import <nixpkgs> {}
, lib ? pkgs.lib
}:
/*
This file creates a simple custom bundle format containing
a PowerShell script plus any required executables and assets.
These assets are only handled in the pure build steps.
Impure packages are installed in _another_ step that runs impurely outside of
the Nix sandbox.
*/
let
makeBundle =
{ name
, bundle
}: pkgs.runCommandNoCC "${name}-archive.tar" {} ''
cp -r -L ${bundle} build
tar -cpf $out -C build .
'';
in
rec {
/*
Make a custom install bundle
*/
makePkg =
{ name
, src
, installScript
}: let
installScript_ = pkgs.writeText "${name}-install-script" installScript;
bundle = pkgs.runCommandNoCC "${name}-bundle" {} ''
mkdir build
ln -s ${src} build/"$(stripHash "${src}")"
ln -s ${installScript_} build/install.ps1
mv build $out
'';
in
makeBundle {
inherit name bundle;
};
/*
Make an install bundle from a .msi
*/
makeMSIPkg =
{ name
, msi
, cert ? null
, ADDLOCAL ? []
, preInstall ? ""
, postInstall ? ""
}: let
installScript = pkgs.writeText "${name}-install-script" ''
${preInstall}
${if cert != null then "certutil.exe -f -addstore TrustedPublisher cert.cer" else ""}
msiexec.exe /i .\${name}.msi ${if ADDLOCAL != [] then "ADDLOCAL=" else ""}${lib.concatStringsSep "," ADDLOCAL}
${postInstall}
'';
bundle = pkgs.runCommandNoCC "${name}-bundle" {} ''
mkdir build
ln -s ${msi} build/${name}.msi
${if cert != null then "ln -s ${cert} build/cert.cer" else ""}
ln -s ${installScript} build/install.ps1
mv build $out
'';
in
makeBundle {
inherit name bundle;
};
/*
Nix cross-built packages
*/
makeCrossPkg =
{ name
, pkg
, destination ? ''C:\Program Files\${name}\''
, preInstall ? ""
, postInstall ? ""
}: let
installScript = pkgs.writeText "${name}-install-script" ''
${preInstall}
Copy-Item pkg -Destination "${destination}"
${postInstall}
'';
bundle = pkgs.runCommandNoCC "${name}-bundle" {} ''
mkdir -p build/pkg
ln -s ${pkg} build/pkg
ln -s ${installScript} build/install.ps1
mv build $out
'';
in
makeBundle {
inherit name bundle;
};
}
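
A sketch of how one of these helpers might be invoked; the package name, URL, hash and installer flag are placeholders, not taken from this repository. The resulting tar archive is the kind of bundle that win.nix later in this diff copies into its bootstrap partition through the packages argument.

# Hypothetical makePkg call (illustration only).
let
  pkgs = import <nixpkgs> {};
  winPkgs = import ./pkgs.nix { inherit pkgs; };
in
  winPkgs.makePkg {
    name = "example-tool";
    src = pkgs.fetchurl {
      url = "https://example.com/example-tool-setup.exe"; # placeholder
      sha256 = pkgs.lib.fakeSha256;                       # placeholder
    };
    # install.ps1 runs inside the guest next to the hash-stripped copy of src
    installScript = ''
      .\example-tool-setup.exe /S
    '';
  }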

View File

@ -0,0 +1,63 @@
{ pkgs
, qemuMem
, sshUser ? "user"
, sshPassword ? "user"
,
}:
with pkgs;
let
qemu-img = "${qemu_kvm}/bin/qemu-img";
runQemu = isolateNetwork: forwardedPorts: extraArgs:
let
restrict =
if isolateNetwork
then "on"
else "off";
# use socat instead of `tcp:…` to allow multiple connections
guestfwds =
builtins.concatStringsSep ""
(
map (
{ listenAddr, targetAddr, port }:
",guestfwd=tcp:${listenAddr}:${toString port}-cmd:${socat}/bin/socat\\ -\\ tcp:${targetAddr}:${toString port}"
) forwardedPorts
);
args = [
#"-enable-kvm"
"-m"
qemuMem
"-bios"
"${OVMF.fd}/FV/OVMF.fd"
"-netdev"
"user,id=n1,net=192.168.1.0/24,restrict=${restrict},hostfwd=tcp::2022-:22${guestfwds}"
"-device"
"e1000,netdev=n1"
];
argStr = builtins.concatStringsSep " " (args ++ extraArgs);
in
"${qemu_kvm}/bin/qemu-system-x86_64 ${argStr}";
# Pass empty config file to prevent ssh from failing to create ~/.ssh
sshOpts = "-F /dev/null -o StrictHostKeyChecking=accept-new -o UserKnownHostsFile=\$TMP/known_hosts";
sshWithQuotes = quotes: cmd: ''
echo ssh windows ${quotes}${cmd}${quotes}
${sshpass}/bin/sshpass -p${sshPassword} -- \
${openssh}/bin/ssh -np 2022 ${sshOpts} \
${sshUser}@localhost \
${quotes}${cmd}${quotes}
'';
ssh = sshWithQuotes "'";
scp = src: target: ''
echo "Copy ${src} to ${target}"
${sshpass}/bin/sshpass -p${sshPassword} -- \
${openssh}/bin/scp -P 2022 ${sshOpts} \
"${src}" "${sshUser}@localhost:${target}"
'';
in
{
inherit qemu-img runQemu ssh sshWithQuotes scp;
inputs = [ qemu_kvm openssh sshpass ];
}
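
build.nix and run-test.nix in this diff are the real consumers of these helpers. Purely as a compact illustration of the interface (runQemu isolateNetwork forwardedPorts extraArgs, plus the ssh wrapper), a sketch that assumes a prebuilt c.img in the working directory:

# Sketch only: boot an existing image headless, forward one guest port,
# check that SSH answers, then power the guest off.
let
  pkgs = import <nixpkgs> {};
  qemu = import ./qemu.nix { inherit pkgs; qemuMem = "2G"; };
  forwardedPorts = [
    { listenAddr = "192.168.1.50"; targetAddr = "192.168.1.50"; port = 1380; }
  ];
in
  pkgs.writeScript "boot-and-check" ''
    #!${pkgs.runtimeShell}
    set -e -m
    ${qemu.runQemu true forwardedPorts [
      "-boot" "order=c"
      "-snapshot"
      "-drive" "file=c.img,index=0,media=disk"
      "-display" "none"
    ]} &
    sleep 30
    ${qemu.ssh "ver"}
    ${qemu.ssh "shutdown /p /f"}
    wait
  ''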

View File

@ -1,65 +1,91 @@
{ pkgs, artiqpkgs, testCommand, testTimeout ? 600 }:
{ pkgs
, sipycoPkg
, artiqPkg
, diskImage ? (import ./build.nix { inherit pkgs; })
, qemuMem ? "2G"
, testTimeout ? 600
, testCommand ? "python -m unittest discover -v sipyco.test && python -m unittest discover -v artiq.test"
,
}:
with pkgs;
let
escape = builtins.replaceStrings [ "\\" ] [ "\\\\" ];
qemu = import ./qemu.nix {
inherit pkgs qemuMem;
};
# Double-escape because we produce a script from a shell heredoc
ssh = cmd: qemu.ssh (escape cmd);
sshUnquoted = qemu.sshWithQuotes "\"";
scp = qemu.scp;
condaEnv = "artiq-env";
tcpPorts = [ 1380 1381 1382 1383 ];
forwardedPorts =
map (port: {
listenAddr = "192.168.1.50";
targetAddr = "192.168.1.50";
inherit port;
}) tcpPorts;
artiq6 = pkgs.lib.strings.versionAtLeast artiqpkgs.artiq.version "6.0";
artiq7 = pkgs.lib.strings.versionAtLeast artiqpkgs.artiq.version "7.0";
wfvm = import (if artiq6 then ../wfvm.nix else ../wfvm-legacy.nix) { inherit pkgs; };
conda-deps = {
name = "conda-deps";
script = let
qt-asyncio-package = if artiq6 then artiqpkgs.conda-qasync else artiqpkgs.conda-quamash;
conda-deps-noarch = import (if artiq7 then ./conda_noarch_packages-7.nix else if artiq6 then ./conda_noarch_packages-6.nix else ./conda_noarch_packages-5.nix) { inherit pkgs; };
conda-deps-win-64 = import (if artiq7 then ./conda_win-64_packages-7.nix else if artiq6 then ./conda_win-64_packages-6.nix else ./conda_win-64_packages-5.nix) { inherit pkgs; };
conda-packages-put = pkgs.lib.strings.concatStringsSep "\n"
( (map (package: ''win-put ${package} 'fake-channel/noarch' '') conda-deps-noarch)
++ (map (package: ''win-put ${package} 'fake-channel/win-64' '') conda-deps-win-64) );
conda-packages-legacy-put = if artiq7 then "" else
''
win-put ${artiqpkgs.conda-windows-binutils-or1k}/win-64/*.tar.bz2 'fake-channel/win-64'
win-put ${artiqpkgs.conda-windows-llvm-or1k}/win-64/*.tar.bz2 'fake-channel/win-64'
win-put ${artiqpkgs.conda-windows-llvmlite-artiq}/win-64/*.tar.bz2 'fake-channel/win-64'
'';
in
''
win-exec 'mkdir fake-channel && mkdir fake-channel\noarch && mkdir fake-channel\win-64'
${conda-packages-put}
${conda-packages-legacy-put}
win-put ${artiqpkgs.conda-pythonparser}/noarch/*.tar.bz2 'fake-channel/noarch'
win-put ${artiqpkgs.conda-sipyco}/noarch/*.tar.bz2 'fake-channel/noarch'
win-put ${qt-asyncio-package}/noarch/*.tar.bz2 'fake-channel/noarch'
'';
};
map (
port: {
listenAddr = "192.168.1.50";
targetAddr = "192.168.1.50";
inherit port;
}
) tcpPorts;
in
wfvm.utils.wfvm-run {
name = "windows-tests";
image = wfvm.makeWindowsImage { installCommands = [ wfvm.layers.anaconda3 conda-deps ]; };
inherit forwardedPorts;
script =
''
${wfvm.utils.win-put}/bin/win-put ${artiqpkgs.conda-artiq}/noarch/*.tar.bz2 'fake-channel/noarch'
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda index fake-channel"
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate && conda create -n ${condaEnv} --offline"
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate ${condaEnv} && conda install -y -c file:///C:/users/wfvm/fake-channel --offline artiq"\
stdenv.mkDerivation {
name = "windows-test-runner";
# Schedule a timed shutdown against hanging test runs
${wfvm.utils.win-exec}/bin/win-exec "shutdown -s -t ${toString testTimeout}"
# Dummy sources
src = pkgs.runCommandNoCC "dummy" {} "touch $out";
dontUnpack = true;
${wfvm.utils.win-exec}/bin/win-exec ".\Anaconda3\scripts\activate ${condaEnv} && ${testCommand}"
propagatedBuildInputs = qemu.inputs;
dontBuild = true;
installPhase = ''
mkdir -p $out/bin
cat > $out/bin/run.sh << EOF
#!/usr/bin/env bash
set -e -m
# Abort the scheduled shutdown
${wfvm.utils.win-exec}/bin/win-exec "shutdown -a"
'';
}
cp ${diskImage} c.img
${qemu.runQemu true forwardedPorts [
"-boot"
"order=c"
"-snapshot"
"-drive"
"file=c.img,index=0,media=disk,cache=unsafe"
"-display"
"none"
]} &
echo "Wait for Windows to boot"
sleep 30
${ssh "ver"}
i=0
for pkg in ${sipycoPkg}/noarch/sipyco*.tar.bz2 ${artiqPkg}/noarch/artiq*.tar.bz2 ; do
${scp "\\$pkg" "to_install\\$i.tar.bz2"}
${sshUnquoted "anaconda\\scripts\\activate ${condaEnv} && conda install to_install\\$i.tar.bz2"}
((i=i+1))
done
# Schedule a timed shutdown against hanging test runs
${ssh "shutdown -s -t ${toString testTimeout}"}
FAIL=n
( ${ssh "anaconda\\scripts\\activate ${condaEnv} && ${testCommand}"} ) || FAIL=y
# Abort the scheduled shutdown
${ssh "shutdown -a"}
# Power off immediately
${ssh "shutdown -p -f"}
wait
if [ "\$FAIL" = "y" ]; then
exit 1
else
exit 0
fi
EOF
chmod a+x $out/bin/run.sh
'';
}
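
build.nix above drives this file through its windowsRunner wrapper; for completeness, a sketch of a direct invocation, taking the conda packages from ../. exactly as build.nix does, with the timeout and test selection chosen arbitrarily. The result provides $out/bin/run.sh just like the checkPhase call above.

# Sketch only: run a reduced test selection with a shorter shutdown timeout.
let
  pkgs = import <nixpkgs> {};
  artiqpkgs = import ../. { inherit pkgs; };
in
  import ./run-test.nix {
    inherit pkgs;
    sipycoPkg = artiqpkgs.conda-sipyco;
    artiqPkg = artiqpkgs.conda-artiq;
    testTimeout = 300;
    testCommand = "python -m unittest discover -v sipyco.test";
  }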

artiq-fast/windows/win.nix Normal file (264 lines)
View File

@ -0,0 +1,264 @@
{ pkgs ? import <nixpkgs> {}
, lib ? pkgs.lib
, diskImageSize ? "22G"
, qemuMem ? "4G"
, windowsImage ? null
, autoUnattendParams ? {}
, packages ? []
, impureMode ? false
, baseRtc ? "2020-04-20T14:21:42"
, installCommands ? []
, users ? {}
, ...
}@attrs:
let
# qemu_test is a smaller closure only building for a single system arch
qemu = pkgs.qemu_test;
libguestfs = pkgs.libguestfs-with-appliance;
# p7zip on >20.03 has known vulns but we have no better option
p7zip = pkgs.p7zip.overrideAttrs(old: {
meta = old.meta // {
knownVulnerabilities = [];
};
});
runQemuCommand = name: command: (
pkgs.runCommandNoCC name { buildInputs = [ p7zip qemu libguestfs ]; }
(
''
if ! test -f; then
echo "KVM not available, bailing out" >> /dev/stderr
exit 1
fi
'' + command
)
);
windowsIso = if windowsImage != null then windowsImage else pkgs.fetchurl {
url = "https://software-download.microsoft.com/download/sg/17763.107.101029-1455.rs5_release_svc_refresh_CLIENT_LTSC_EVAL_x64FRE_en-us.iso";
sha256 = "668fe1af70c2f7416328aee3a0bb066b12dc6bbd2576f40f812b95741e18bc3a";
};
openSshServerPackage = ./openssh/server-package.cab;
autounattend = import ./autounattend.nix (
attrs // {
inherit pkgs;
}
);
bundleInstaller = pkgs.callPackage ./bundle {};
# Packages required to drive installation of other packages
bootstrapPkgs = let
winPkgs = import ./pkgs.nix { inherit pkgs; };
in
runQemuCommand "bootstrap-win-pkgs.img" ''
mkdir pkgs
mkdir pkgs/bootstrap
mkdir pkgs/user
mkdir pkgs/fod
cp ${bundleInstaller} pkgs/"$(stripHash "${bundleInstaller}")"
# Install optional windows features
cp ${openSshServerPackage} pkgs/fod/OpenSSH-Server-Package~31bf3856ad364e35~amd64~~.cab
# SSH setup script goes here because the Windows XML parser sucks
cp ${autounattend.setupScript} pkgs/ssh-setup.ps1
${lib.concatStringsSep "\n" (builtins.map (x: ''cp ${x} pkgs/bootstrap/"$(stripHash "${x}")"'') packages)}
virt-make-fs --partition --type=fat pkgs/ $out
'';
mkQemuFlags = extraFlags: [
#"-enable-kvm"
"-cpu"
"host"
"-smp"
"$NIX_BUILD_CORES"
"-m"
"${qemuMem}"
"-bios"
"${pkgs.OVMF.fd}/FV/OVMF.fd"
"-vga"
"virtio"
"-device"
"piix3-usb-uhci" # USB root hub
# "CD" drive with windows features-on-demand
# "-cdrom" "${fodIso}"
# Set the base clock inside the VM
"-rtc base=${baseRtc}"
# Always enable SSH port forward
# It's not really required for the initial setup but we do it here anyway
"-netdev user,id=n1,net=192.168.1.0/24,restrict=off,hostfwd=tcp::2022-:22"
"-device e1000,netdev=n1"
] ++ lib.optional (!impureMode) "-nographic" ++ extraFlags;
installScript = pkgs.writeScript "windows-install-script" (
let
qemuParams = mkQemuFlags [
# "CD" drive with bootstrap pkgs
"-drive"
"id=virtio-win,file=${bootstrapPkgs},if=none,format=raw,readonly=on"
"-device"
"usb-storage,drive=virtio-win"
# USB boot
"-drive"
"id=win-install,file=usbimage.img,if=none,format=raw,readonly=on"
"-device"
"usb-storage,drive=win-install"
# Output image
"-drive"
"file=c.img,index=0,media=disk,cache=unsafe"
];
in
''
#!${pkgs.runtimeShell}
set -euxo pipefail
export PATH=${lib.makeBinPath [ p7zip qemu libguestfs ]}:$PATH
if test -z "''${NIX_BUILD_CORES+x}"; then
export NIX_BUILD_CORES=$(nproc)
fi
# Create a bootable "USB" image
# Booting in USB mode circumvents the "press any key to boot from cdrom" prompt
#
# Also embed the autounattend answer file in this image
mkdir -p win
mkdir -p win/nix-win
7z x -y ${windowsIso} -owin
cp ${autounattend.autounattendXML} win/autounattend.xml
virt-make-fs --partition --type=fat win/ usbimage.img
rm -rf win
# Qemu requires files to be rw
qemu-img create -f qcow2 c.img ${diskImageSize}
env NIX_BUILD_CORES="''${NIX_BUILD_CORES:-4}" qemu-system-x86_64 ${lib.concatStringsSep " " qemuParams}
''
);
baseImage = pkgs.runCommandNoCC "windows.img" {} ''
${installScript}
mv c.img $out
'';
# Use Paramiko instead of OpenSSH
#
# OpenSSH goes out of its way to make password logins hard
# and Windows goes out of its way to make key authentication hard
# so we're in a pretty tough spot
#
# Luckily the usage patterns are quite simple and easy to reimplement with paramiko
paramikoClient = pkgs.writeScriptBin "win" ''
#!${pkgs.python3.withPackages(ps: [ ps.paramiko ])}/bin/python
import paramiko
import os.path
import sys
def w_join(*args):
# Like os.path.join but for windows paths
return "\\".join(args)
if __name__ == '__main__':
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.client.AutoAddPolicy)
cmd = sys.argv[1]
try:
client.connect(hostname="127.0.0.1", port=2022, username="artiq", password="${users.artiq.password}", timeout=1)
if cmd == "put":
sftp = client.open_sftp()
src = sys.argv[2]
dst = sys.argv[3]
sftp.put(src, w_join(dst, os.path.basename(src)))
elif cmd == "exec":
_, stdout, stderr = client.exec_command(sys.argv[2])
sys.stdout.write(stdout.read().strip().decode())
sys.stdout.flush()
sys.stderr.write(stderr.read().strip().decode())
sys.stderr.flush()
else:
raise ValueError(f"Unhandled command: {cmd}")
except (EOFError, paramiko.ssh_exception.SSHException):
exit(1)
'';
finalImage = builtins.foldl' (acc: v: pkgs.runCommandNoCC "${v.name}.img" {
buildInputs = [
paramikoClient
qemu
];
} (let
script = pkgs.writeScript "${v.name}-script" v.script;
qemuParams = mkQemuFlags [
# Output image
"-drive"
"file=c.img,index=0,media=disk,cache=unsafe"
];
in ''
export HOME=$(mktemp -d)
# Create an image referencing the previous image in the chain
qemu-img create -f qcow2 -b ${acc} c.img
qemu-system-x86_64 ${lib.concatStringsSep " " qemuParams} &
# If the machine is not up within 10 minutes it's likely never coming up
timeout=600
# Wait for VM to be accessible
sleep 20
echo "Waiting for SSH"
while true; do
if test "$timeout" -eq 0; then
echo "SSH connection timed out"
exit 1
fi
output=$(win exec 'echo Ran command' || echo "")
if test "$output" = "Ran command"; then
break
fi
echo "Retrying in 1 second, timing out in $timeout seconds"
((timeout=$timeout-1))
sleep 1
done
echo "Executing user script to build layer"
${script}
# Allow install to "settle"
sleep 20
win exec 'shutdown /s'
mv c.img $out
'')) baseImage installCommands;
in
# impureMode is meant for debugging the base image, not the full incremental build process
if !(impureMode) then finalImage else assert installCommands == []; installScript
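
The incremental layers consumed by the finalImage fold above talk to the guest only through the paramiko-based win client (put and exec). A hypothetical installCommands entry; the guest path is an assumption, as is the file being copied:

# Sketch of a single installCommands entry (not part of this diff); the
# finalImage fold above turns each such entry into its own qcow2 layer.
{
  name = "copy-and-run-example";
  script = ''
    # the script runs on the host: prepare a file, then upload it
    echo hello > example.txt
    # "win put <src> <dst-dir>" copies over SFTP (see paramikoClient above);
    # C:\Users\artiq is assumed to be the artiq user's home directory
    win put example.txt 'C:\Users\artiq'
    # "win exec <cmd>" runs a single command in the guest over SSH
    win exec 'type C:\Users\artiq\example.txt'
  '';
}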

View File

@ -1,128 +1,120 @@
{ pkgs ? import <nixpkgs> { overlays = [ (import ./artiq-fast/mozilla-overlay.nix) ]; }
, use-generated ? <use-generated>
}:
{ pkgs ? import <nixpkgs> {}}:
let
sinaraSystemsRev = builtins.readFile <artiq-board-generated/sinara-rev.txt>;
sinaraSystemsHash = builtins.readFile <artiq-board-generated/sinara-hash.txt>;
sinaraSystemsSrc =
if use-generated
then pkgs.fetchgit {
url = "https://git.m-labs.hk/M-Labs/sinara-systems-legacy.git";
rev = sinaraSystemsRev;
sha256 = sinaraSystemsHash;
}
else <sinaraSystemsSrc>;
artiq-fast =
if use-generated
then <artiq-board-generated/fast>
else <artiq-fast>;
artiqVersion = import (artiq-fast + "/pkgs/artiq-version.nix") {
inherit (pkgs) stdenv git fetchgit;
};
targets = import ./artiq-full/artiq-targets.nix {
inherit pkgs artiqVersion sinaraSystemsSrc;
};
kasliVariants = map ({ variant, ... }: variant) (
builtins.filter ({ target, ... }: target == "kasli") (
builtins.attrValues targets
)
);
standaloneVariants = map ({ variant, ... }: variant) (
builtins.filter ({ target, standalone ? false, ... }: target == "kasli" && standalone) (
builtins.attrValues targets
)
);
serializedTargets = pkgs.lib.generators.toPretty {} (
map (conf:
if conf ? buildCommand
then conf // {
buildCommand = builtins.replaceStrings ["$"] ["\\\\\\$"] conf.buildCommand;
}
else conf
) (builtins.attrValues targets)
);
sinaraSystemsSrc = <sinaraSystemsSrc>;
generatedNix = pkgs.runCommand "generated-nix" { buildInputs = [ pkgs.nix pkgs.git ]; }
generatedNix = builtins.trace "full generatedNix" (pkgs.runCommand "generated-nix" { buildInputs = [ pkgs.nix pkgs.git ]; }
''
mkdir $out
${if use-generated
then ''
cp -a ${<artiq-board-generated>} $out/board-generated
ln -s board-generated/fast $out/fast
''
else "cp -a ${<artiq-fast>} $out/fast"}
cp ${./artiq-full}/artiq-board-vivado.nix $out
cp ${./artiq-full}/generate-identifier.py $out
cp ${./artiq-full}/conda-artiq-board.nix $out
cp ${./artiq-full}/extras.nix $out
cp ${./artiq-full}/*.patch $out
cp -a ${<artiq-fast>} $out/fast
cp ${./artiq-full/conda-artiq-board.nix} $out/conda-artiq-board.nix
cp ${./artiq-full/extras.nix} $out/extras.nix
${if use-generated
then ''
REV=${sinaraSystemsRev}
HASH=${sinaraSystemsHash}
''
else ''
REV=`git --git-dir ${sinaraSystemsSrc}/.git rev-parse HEAD`
SINARA_SRC_CLEAN=`mktemp -d`
cp -a ${sinaraSystemsSrc}/. $SINARA_SRC_CLEAN
chmod -R 755 $SINARA_SRC_CLEAN/.git
chmod 755 $SINARA_SRC_CLEAN
rm -rf $SINARA_SRC_CLEAN/.git
HASH=`nix-hash --type sha256 --base32 $SINARA_SRC_CLEAN`
''}
REV=`git --git-dir ${sinaraSystemsSrc}/.git rev-parse HEAD`
SINARA_SRC_CLEAN=`mktemp -d`
cp -a ${sinaraSystemsSrc}/. $SINARA_SRC_CLEAN
chmod -R 755 $SINARA_SRC_CLEAN/.git
chmod 755 $SINARA_SRC_CLEAN
rm -rf $SINARA_SRC_CLEAN/.git
HASH=`nix-hash --type sha256 --base32 $SINARA_SRC_CLEAN`
cat > $out/default.nix << EOF
{ pkgs ? import <nixpkgs> { overlays = [ (import ./fast/mozilla-overlay.nix) ]; }}:
{ pkgs ? import <nixpkgs> {}}:
let
artiq-fast = import ${if use-generated then "./board-generated" else "."}/fast { inherit pkgs; };
ddbDeps = [
artiq-fast.artiq
(pkgs.python3.withPackages (ps: [ ps.jsonschema ]))
];
artiq-fast = import ./fast { inherit pkgs; };
kasliVariants = [${builtins.concatStringsSep " " (
builtins.map (variant: "\"${variant}\"") kasliVariants
)}];
standaloneVariants = [${builtins.concatStringsSep " " (
builtins.map (variant: "\"${variant}\"") standaloneVariants
)}];
target = "kasli";
variants = [
"afmaster"
"afsatellite"
"berkeley2"
"berkeley3"
"csu"
"duke"
"duke2"
"duke3"
"femto1"
"femto2"
"femto3"
"freiburg1"
"griffith"
"hub"
"hw"
"indiana"
"innsbruck2"
"ist"
"liaf"
"luh"
"luh2"
"luh3"
"mikes"
"mit"
"mitll3"
"mitll4master"
"mitll4satellite"
"mpik"
"mpq"
"nict"
"nist"
"no"
"npl1"
"npl2"
"oklahoma"
"olomouc"
"opticlock"
"oregon"
"osaka"
"ptb"
"ptb2"
"ptb3"
"ptb4"
"ptb5"
"ptb6"
"ptbal"
"ptbin"
"purdue"
"qe"
"rice"
"saymamaster"
"siegen"
"su"
"sydney"
"uaarhus"
"ubirmingham"
"ugranada"
"unlv"
"unsw2"
"ustc2"
"vlbaimaster"
"vlbaisatellite"
"wipm"
"wipm4"
"wipm5master"
"wipm5satellite"
] ++ (pkgs.lib.lists.optionals (pkgs.lib.strings.versionAtLeast artiq-fast.artiq.version "6.0") [
"bonn1master"
"bonn1satellite"
"hw2master"
"hw2satellite"
"uamsterdam"
]);
vivado = import ${if use-generated then "./board-generated" else "."}/fast/vivado.nix {
inherit pkgs;
};
artiq-board =
${if use-generated
then ''
import ./artiq-board-vivado.nix {
inherit pkgs vivado;
version = artiq-fast.artiq.version;
board-generated = import ./board-generated {
inherit pkgs;
};
}
''
else ''
import ./fast/artiq-board.nix {
inherit pkgs vivado;
rustPlatform = import ./fast/rust-platform.nix { inherit pkgs; };
}
''};
vivado = builtins.trace "vivado (full)" (import ./fast/vivado.nix { inherit pkgs; });
artiq-board = builtins.trace "artiq-board" (import ./fast/artiq-board.nix { inherit pkgs vivado; });
conda-artiq-board = import ./conda-artiq-board.nix { inherit pkgs; };
src = pkgs.fetchgit {
url = "https://git.m-labs.hk/M-Labs/sinara-systems-legacy.git";
url = "https://git.m-labs.hk/M-Labs/sinara-systems.git";
rev = "$REV";
sha256 = "$HASH";
};
artiq-targets = pkgs.lib.lists.foldr (conf: start:
generic-kasli = pkgs.lib.lists.foldr (variant: start:
let
inherit (conf) target variant;
json = src + "/\''${variant}.json";
boardBinaries = artiq-board (conf // {
src = json;
});
json = builtins.toPath (src + "/\''${variant}.json");
boardBinaries = artiq-board {
inherit target variant;
buildCommand = "python -m artiq.gateware.targets.kasli_generic \''${json}";
};
in
start // {
"artiq-board-\''${target}-\''${variant}" = boardBinaries;
@ -130,13 +122,10 @@ let
boardBinaries = boardBinaries;
inherit target variant;
};
} // (pkgs.lib.optionalAttrs (
target == "kasli" &&
builtins.elem variant standaloneVariants
) {
} // (pkgs.lib.optionalAttrs ((builtins.fromJSON (builtins.readFile json)).base == "standalone") {
"device-db-\''${target}-\''${variant}" = pkgs.stdenv.mkDerivation {
name = "device-db-\''${target}-\''${variant}";
buildInputs = ddbDeps;
buildInputs = [ artiq-fast.artiq ];
phases = [ "buildPhase" ];
buildPhase = "
mkdir \$out
@ -145,11 +134,99 @@ let
echo file device_db_template \$out/device_db.py >> \$out/nix-support/hydra-build-products
";
};
})
) {} ${serializedTargets};
})) {} variants;
drtio-systems = {
af = {
master = "afmaster";
satellites = {
"1" = "afsatellite";
};
};
mitll4 = {
master = "mitll4master";
satellites = {
"1" = "mitll4satellite";
};
};
vlbai = {
master = "vlbaimaster";
satellites = {
"1" = "vlbaisatellite";
};
};
wipm5 = {
master = "wipm5master";
satellites = {
"1" = "wipm5satellite";
};
};
} // (pkgs.lib.optionalAttrs (pkgs.lib.strings.versionAtLeast artiq-fast.artiq.version "6.0") {
bonn1 = {
master = "bonn1master";
satellites = {
"1" = "bonn1satellite";
};
};
hw2 = {
master = "hw2master";
satellites = {
"1" = "hw2satellite";
};
};
});
drtio-ddbs = pkgs.lib.attrsets.mapAttrs'
(system: crates: pkgs.lib.attrsets.nameValuePair ("device-db-" + system)
(pkgs.stdenv.mkDerivation {
name = "device-db-\''${system}";
buildInputs = [ artiq-fast.artiq ];
phases = [ "buildPhase" ];
buildPhase = "
mkdir \$out
artiq_ddb_template \
\''${pkgs.lib.strings.concatStringsSep " " (pkgs.lib.attrsets.mapAttrsToList (dest: desc: "-s " + dest + " " + src + "/" + desc + ".json") crates.satellites) } \
\''${src}/\''${crates.master}.json -o \$out/device_db.py
mkdir \$out/nix-support
echo file device_db_template \$out/device_db.py >> \$out/nix-support/hydra-build-products
";
})) drtio-systems;
extras = import ./extras.nix { inherit pkgs; inherit (artiq-fast) sipyco asyncserial artiq; };
in
artiq-fast // artiq-targets // extras // rec {
artiq-fast // generic-kasli // drtio-ddbs // extras // rec {
artiq-board-sayma-rtm = artiq-board {
target = "sayma";
variant = "rtm";
buildCommand = "python -m artiq.gateware.targets.sayma_rtm";
};
artiq-board-sayma-satellite = artiq-board {
target = "sayma";
variant = "satellite";
buildCommand = "python -m artiq.gateware.targets.sayma_amc";
};
artiq-board-metlino-master = artiq-board {
target = "metlino";
variant = "master";
buildCommand = "python -m artiq.gateware.targets.metlino";
};
artiq-board-kc705-nist_qc2 = artiq-board {
target = "kc705";
variant = "nist_qc2";
};
conda-artiq-board-sayma-rtm = conda-artiq-board {
target = "sayma";
variant = "rtm";
boardBinaries = artiq-board-sayma-rtm;
};
conda-artiq-board-sayma-satellite = conda-artiq-board {
target = "sayma";
variant = "satellite";
boardBinaries = artiq-board-sayma-satellite;
};
conda-artiq-board-metlino-master = conda-artiq-board {
target = "metlino";
variant = "master";
boardBinaries = artiq-board-metlino-master;
};
conda-artiq-board-kasli-tester = conda-artiq-board {
target = "kasli";
variant = "tester";
@ -160,28 +237,34 @@ let
variant = "nist_clock";
boardBinaries = artiq-fast.artiq-board-kc705-nist_clock;
};
conda-artiq-board-kc705-nist_qc2 = conda-artiq-board {
target = "kc705";
variant = "nist_qc2";
boardBinaries = artiq-board-kc705-nist_qc2;
};
}
EOF
'';
'');
pythonDeps = import ./artiq-full/python-deps.nix { inherit pkgs; };
sipycoManualPackages = import ./artiq-full/sipyco-manual.nix {
inherit (pkgs) stdenv lib python3Packages texlive texinfo;
inherit (import artiq-fast { inherit pkgs; }) sipyco;
inherit (import <artiq-fast> { inherit pkgs; }) sipyco;
};
artiqManualPackages = import ./artiq-full/artiq-manual.nix {
inherit (pkgs) stdenv lib fetchgit git python3Packages texlive texinfo;
inherit (pythonDeps) sphinxcontrib-wavedrom;
inherit artiq-fast;
};
artiq-full = import generatedNix { inherit pkgs; };
exampleUserEnv = import ./artiq-full/example-user-env.nix { inherit pkgs artiq-full; };
jobs = artiq-full // sipycoManualPackages // artiqManualPackages // exampleUserEnv;
jobs = (import generatedNix { inherit pkgs; }) // sipycoManualPackages // artiqManualPackages // {
# This is in the example in the ARTIQ manual - precompile it to speed up
# installation for users.
matplotlib-qt = pkgs.lib.hydraJob (pkgs.python3Packages.matplotlib.override { enableQt = true; });
};
in
builtins.mapAttrs (key: value: pkgs.lib.hydraJob value) jobs // {
artiq-full = pkgs.releaseTools.channel {
builtins.mapAttrs (key: value: builtins.trace "full-${key}" pkgs.lib.hydraJob value) jobs // {
artiq-full = builtins.trace "channel" (pkgs.releaseTools.channel {
name = "artiq-full";
src = generatedNix;
constituents = [];
};
conda-channel = import ./artiq-full/conda-channel.nix { inherit pkgs artiq-fast; } { inherit jobs; };
constituents = builtins.attrValues jobs;
});
conda-channel = builtins.trace "conda-channel" (import ./artiq-full/conda-channel.nix { inherit pkgs; }) { inherit jobs; };
}

View File

@ -1,125 +0,0 @@
# Install Vivado in /opt and add to /etc/nixos/configuration.nix:
# nix.sandboxPaths = ["/opt"];
{ pkgs
, vivado ? import ./fast/vivado.nix { inherit pkgs; }
, board-generated
, version
}:
let
# Funnelling the source code through a Nix string allows dropping
# all dependencies via `unsafeDiscardStringContext`.
discardContextFromPath = { name, src }:
let
packed = pkgs.stdenv.mkDerivation {
name = "${name}.nar.base64";
buildInputs = [ pkgs.nix ];
phases = [ "installPhase" ];
installPhase = "nix-store --dump ${src} | base64 -w0 > $out";
};
unpacked = archive:
pkgs.stdenvNoCC.mkDerivation {
name = builtins.unsafeDiscardStringContext name;
phases = [ "installPhase" ];
buildInputs = [ pkgs.nix ];
installPhase = "base64 -d < ${archive} | nix-store --restore $out";
};
in
unpacked (
builtins.toFile "${builtins.unsafeDiscardStringContext name}.nar.base64" (
builtins.unsafeDiscardStringContext (
builtins.readFile packed
))) ;
in
{ target
, variant
, extraInstallCommands ? ""
, ... }:
let
name = "artiq-board-${target}-${variant}-${version}";
installPath = builtins.unsafeDiscardStringContext "${pkgs.python3Packages.python.sitePackages}/artiq/board-support/${target}-${variant}";
generated = board-generated."artiq-board-${target}-${variant}";
identifierStr = "${version};${variant}";
identifiers = import (
pkgs.runCommandLocal "${name}-identifiers.nix" {
buildInputs = [ pkgs.python3 ];
} ''python ${./generate-identifier.py} "${identifierStr}" > $out''
);
# Depends on just Vivado and the generated Bitstream source
vivadoCheckpoint = pkgs.stdenvNoCC.mkDerivation {
name = builtins.unsafeDiscardStringContext "${name}-vivado-checkpoint";
src = discardContextFromPath {
name = "${name}-gateware";
src = "${generated}/gateware";
};
buildInputs = [ vivado pkgs.nix ];
buildPhase = ''
vivado -mode batch -source top_route.tcl
'';
installPhase = ''
mkdir -p $out
chmod a+r top_route.dcp
cp top_route.dcp $out
cp top_bitstream.tcl $out
'';
};
vivadoOutput = pkgs.stdenvNoCC.mkDerivation {
name = builtins.unsafeDiscardStringContext "${name}-vivado-output";
src = vivadoCheckpoint;
buildInputs = [ vivado ];
buildPhase =
''
cat >top.tcl <<EOF
open_checkpoint top_route.dcp
'' +
(pkgs.lib.concatMapStrings ({ cell, init }:
''
set_property INIT ${init} [get_cell ${cell}]
''
) identifiers) +
''
source "top_bitstream.tcl"
EOF
vivado -mode batch -source top.tcl
'';
installPhase = ''
TARGET_DIR=$out/${installPath}
mkdir -p $TARGET_DIR
chmod a+r top.bit
cp top.bit $TARGET_DIR/
'';
# temporarily disabled because there is currently always at least one Kasli bitstream
# that fails timing and blocks the conda channel.
doCheck = false;
checkPhase = ''
# Search for PCREs in the Vivado output to check for errors
check_log() {
set +e
grep -Pe "$1" vivado.log
FOUND=$?
set -e
if [ $FOUND != 1 ]; then
exit 1
fi
}
check_log "\d+ constraint not met\."
check_log "Timing constraints are not met\."
'';
};
in
pkgs.python3Packages.toPythonModule (
pkgs.buildEnv rec {
inherit name;
paths = [ generated vivadoOutput ];
pathsToLink = [ "/${installPath}" ];
})

View File

@ -1,7 +1,7 @@
{ stdenv, lib, fetchgit, git, python3Packages, texlive, texinfo, sphinxcontrib-wavedrom, artiq-fast }:
{ stdenv, lib, fetchgit, git, python3Packages, texlive, texinfo, sphinxcontrib-wavedrom }:
let
artiqVersion = import (artiq-fast + "/pkgs/artiq-version.nix") { inherit stdenv fetchgit git; };
artiqVersion = import <artiq-fast/pkgs/artiq-version.nix> { inherit stdenv fetchgit git; };
isLatexPdfTarget = target: builtins.match "latexpdf.*" target != null;
@ -16,7 +16,7 @@ let
name = "artiq-manual-${target}-${version}";
version = artiqVersion;
src = import (artiq-fast + "/pkgs/artiq-src.nix") { inherit fetchgit; };
src = import <artiq-fast/pkgs/artiq-src.nix> { inherit fetchgit; };
buildInputs = [
python3Packages.sphinx python3Packages.sphinx_rtd_theme
python3Packages.sphinx-argparse sphinxcontrib-wavedrom
@ -26,7 +26,7 @@ let
preBuild = ''
export VERSIONEER_OVERRIDE=${artiqVersion}
export SOURCE_DATE_EPOCH=${import (artiq-fast + "/pkgs/artiq-timestamp.nix") { inherit stdenv fetchgit git; }}
export SOURCE_DATE_EPOCH=${import <artiq-fast/pkgs/artiq-timestamp.nix> { inherit stdenv fetchgit git; }}
cd doc/manual
'';
makeFlags = [ target ];

View File

@ -1,66 +0,0 @@
{ pkgs
, artiqVersion
, sinaraSystemsSrc
}:
let
jsons =
map (jsonFile: builtins.fromJSON (
builtins.readFile (sinaraSystemsSrc + "/${jsonFile}")
)) (
builtins.attrNames (
pkgs.lib.filterAttrs (name: type:
type != "directory" &&
builtins.match ".+\\.json" name != null
) (builtins.readDir sinaraSystemsSrc)
)
);
kasli = builtins.listToAttrs (
builtins.map ({ variant, base, ... }: {
name = "artiq-board-kasli-${variant}";
value = {
target = "kasli";
inherit variant;
src = sinaraSystemsSrc + "/${variant}.json";
buildCommand = "python -m artiq.gateware.targets.kasli_generic $src";
standalone = base == "standalone";
};
}) (
builtins.filter (json:
pkgs.lib.strings.versionAtLeast artiqVersion (
if json ? min_artiq_version
then json.min_artiq_version
else "0"
)
) jsons
)
);
in
kasli // {
artiq-board-metlino-master = {
target = "metlino";
variant = "master";
buildCommand = "python -m artiq.gateware.targets.metlino";
};
artiq-board-kc705-nist_qc2 = {
target = "kc705";
variant = "nist_qc2";
};
} // (pkgs.lib.optionalAttrs (pkgs.lib.strings.versionAtLeast artiqVersion "7.0") {
artiq-board-kc705-nist_clock_master = {
target = "kc705";
variant = "nist_clock_master";
};
artiq-board-kc705-nist_qc2_master = {
target = "kc705";
variant = "nist_qc2_master";
};
artiq-board-kc705-nist_clock_satellite = {
target = "kc705";
variant = "nist_clock_satellite";
};
artiq-board-kc705-nist_qc2_satellite = {
target = "kc705";
variant = "nist_qc2_satellite";
};
})

View File

@ -1,8 +1,8 @@
{ pkgs, artiq-fast }:
{ pkgs }:
{ jobs }:
let
condaBuilderEnv = import (artiq-fast + "/conda/builder-env.nix") { inherit pkgs; };
condaBuilderEnv = import <artiq-fast/conda/builder-env.nix> { inherit pkgs; };
in
pkgs.runCommand "conda-channel" { }
''

View File

@ -1,38 +0,0 @@
{ pkgs, artiq-full }:
let
matplotlib-qt = (pkgs.python3Packages.matplotlib.override { enableQt = true; });
in
{
artiq-example-user-env = pkgs.runCommand "artiq-example-user-env" {
buildInputs = [
(pkgs.python3.withPackages(ps: [
artiq-full.artiq
artiq-full.artiq-comtools
artiq-full.wand
artiq-full.flake8-artiq
artiq-full.lda
artiq-full.korad_ka3005p
artiq-full.novatech409b
artiq-full.thorlabs_tcube
artiq-full.artiq-board-kc705-nist_clock
ps.paramiko
ps.pandas
ps.numpy
ps.scipy
# our newer llvmlite conflicts with the one in nixpkgs (21.05), reenable after nixpkgs updates llvmlite
#ps.numba
ps.bokeh
matplotlib-qt
# cirq is broken and doesn't build (as of 20.09.3281.06b11191834)
#(ps.cirq.override { matplotlib = matplotlib-qt; })
# qiskit does not work with matplotlib-qt
#ps.qiskit
]))
artiq-full.openocd
pkgs.gtkwave
pkgs.spyder
pkgs.R
];
} "touch $out";
}

View File

@ -3,20 +3,13 @@ let
condaBuild = import ./fast/conda/build.nix { inherit pkgs; };
condaFakeSource = import ./fast/conda/fake-source.nix { inherit pkgs; };
dualPackage = (
{ name, version, src, pythonOptions ? {}, condaOptions ? {}, enabled ? true, withManual ? true}:
pkgs.lib.optionalAttrs enabled ({
{ name, version, src, pythonOptions ? {}, condaOptions ? {}}:
{
"${name}" = pkgs.python3Packages.buildPythonPackage ({
inherit version;
name = "${name}-${version}";
inherit src;
} // pythonOptions);
"conda-${name}" = condaBuild {
name = "conda-${name}";
src = condaFakeSource ({
inherit name version src;
} // condaOptions);
};
} // (pkgs.lib.optionalAttrs withManual {
"${name}-manual-html" = pkgs.stdenv.mkDerivation {
name = "${name}-manual-html-${version}";
inherit version src;
@ -38,7 +31,13 @@ let
echo doc manual ${dest}/html index.html >> $out/nix-support/hydra-build-products
'';
};
}))
"conda-${name}" = condaBuild {
name = "conda-${name}";
src = condaFakeSource ({
inherit name version src;
} // condaOptions);
};
}
);
# https://github.com/m-labs/artiq/issues/23
hidapi = pkgs.hidapi.overrideAttrs (oa: {
@ -56,8 +55,8 @@ in
src = pkgs.fetchFromGitHub {
owner = "m-labs";
repo = "korad_ka3005p";
rev = "a1898409cb188b388ed1cf84e76ca69e9c8a74eb";
sha256 = "0h20qss70nssqiagc2fx75mravq1pji7rizhag3nq8xrcz2w20nc";
rev = "a0cfaa5792a211e166d224314c4d0be4881b9b8d";
sha256 = "1bxzyjyvdhsbm9hj7ypf0vgkd1lvc340bb6lx3wchvh30n7bv9gv";
};
pythonOptions = { propagatedBuildInputs = [ sipyco asyncserial ]; };
condaOptions = { dependencies = [ "sipyco" "asyncserial" ]; };
@ -67,8 +66,8 @@ in
src = pkgs.fetchFromGitHub {
owner = "m-labs";
repo = "novatech409b";
rev = "3bd559753972f07d881df66b7c6819afc5436053";
sha256 = "1g9qv6fn5h7d393mb1v7w8sg6fimqg34blqdj22qnayb4agw1wyg";
rev = "8740b3e7b254e03395135e6bc128bbaca70d4fbb";
sha256 = "0mwm434y83y8jb30fpz69z6z3b6sxbc8dv3nw0hq4wc7iginx89d";
};
pythonOptions = { propagatedBuildInputs = [ sipyco asyncserial ]; };
condaOptions = { dependencies = [ "sipyco" "asyncserial" ]; };
@ -78,8 +77,8 @@ in
src = pkgs.fetchFromGitHub {
owner = "m-labs";
repo = "lda";
rev = "e6bf828b6dfd7fbf59b61b691712736c98c95970";
sha256 = "1w4ykzsl3386bz4ggpd6i60b6a3k7rnc6qjw59xm3hk0vs3w2vyn";
rev = "6138a94a1116c8f7b40b8bd8bb161f847065aab6";
sha256 = "1009k9pq8wx5zxrljkxr1g95g8q979i7mq3csksdkd3d0v2jvqif";
};
pythonOptions = {
propagatedBuildInputs = [ sipyco ];
@ -96,8 +95,8 @@ in
src = pkgs.fetchFromGitHub {
owner = "m-labs";
repo = "thorlabs_tcube";
rev = "0cb0c15fc7e660a150e193245f5338d48f8b97db";
sha256 = "1n4zmjcj2kpd97217y602pq6x8s80w39fgyi6qjmal92aicqdg07";
rev = "8b85292d76a69ae72ba8da32b894c87c794574ba";
sha256 = "09cy9nhydcwdib21wb0qg1cinvibfbszwgphrmf2ajw5kqpr1d6a";
};
pythonOptions = { propagatedBuildInputs = [ sipyco asyncserial ]; };
condaOptions = { dependencies = [ "sipyco" "asyncserial" ]; };
@ -195,9 +194,6 @@ in
sha256 = "165j12k9nnrkf2pv0idcv6xhnp1hnsllna4rps2dssnqgjfaw1ss";
};
propagatedBuildInputs = [ sipyco pkgs.python3Packages.numpy pkgs.python3Packages.aiohttp ];
# Modifies PATH to pass the wrapped python environment (i.e. python3.withPackages(...)) to subprocesses.
# Allows subprocesses using python to find all packages you have installed
makeWrapperArgs = [ ''--run 'if [ ! -z "$NIX_PYTHONPREFIX" ]; then export PATH=$NIX_PYTHONPREFIX/bin:$PATH;fi' '' ];
};
conda-artiq-comtools = condaBuild {
name = "conda-artiq-comtools";
@ -207,69 +203,4 @@ in
dependencies = [ "sipyco" "numpy" "aiohttp >=3" ];
};
};
} // {
wand = pkgs.python3Packages.buildPythonApplication rec {
name = "wand";
version = "0.4.dev";
src = pkgs.fetchFromGitHub {
owner = "OxfordIonTrapGroup";
repo = "wand";
rev = "0bf1cfef4aa37e5761c20ac8702abec125b45e23";
sha256 = "0jfw6w6id7qkx2f6rklrmp13b2hsnvii1qbls60ampx399lcb43g";
};
patches = [ ./wand-fix-config-dir.patch ];
nativeBuildInputs = [ pkgs.qt5.wrapQtAppsHook ];
dontWrapQtApps = true;
postFixup = ''
wrapQtApp "$out/bin/wand_gui"
'';
propagatedBuildInputs = with pkgs.python3Packages; [ artiq quamash numpy scipy influxdb setuptools ];
};
} // (dualPackage {
name = "flake8-artiq";
version = "0.1.0";
withManual = false;
src = pkgs.fetchgit {
url = "https://gitlab.com/duke-artiq/flake8-artiq.git";
rev = "1216092974140a561850905734fc22fdacdc2cde";
sha256 = "0rkab2qdwyzms6nxc44jzb5grvkkbpjwwmfv2zj96cm6cm8d9pdr";
};
pythonOptions = {
propagatedBuildInputs = [ pkgs.python3Packages.flake8 ];
checkInputs = [ pkgs.python3Packages.pytestCheckHook ];
};
condaOptions = { dependencies = [ "flake8" ]; };
}) // (dualPackage rec {
name = "dax";
version = "6.7";
enabled = builtins.head (builtins.splitVersion version) == builtins.head (builtins.splitVersion artiq.version);
withManual = false;
src = pkgs.fetchgit {
url = "https://gitlab.com/duke-artiq/dax.git";
rev = "v${version}";
sha256 = "0rgvqqiypqvxjzrsixn3h7dn93isqw5vc2wrmpkxhzvw9lh5ihm7";
};
pythonOptions = {
VERSIONEER_OVERRIDE = version;
inherit (pkgs.python3Packages.pygit2) SSL_CERT_FILE;
propagatedBuildInputs = [ artiq sipyco ]
++ (with pkgs.python3Packages; [ numpy scipy pyvcd natsort pygit2 matplotlib graphviz h5py networkx sortedcontainers ]);
checkInputs = [ pkgs.python3Packages.pytestCheckHook ];
};
condaOptions = { dependencies = [ "python>=3.7" "artiq" "sipyco" "numpy" "scipy" "pyvcd" "natsort" "pygit2" "matplotlib" "python-graphviz" "h5py" "networkx" "sortedcontainers" ]; };
}) // (dualPackage {
name = "dax-applets";
version = "0.0.0";
withManual = false;
src = pkgs.fetchgit {
url = "https://gitlab.com/duke-artiq/dax-applets.git";
rev = "0f0196b6941b0c44a33c85d8c02047ca65466463";
sha256 = "0mx6yjvprhdnkdigwns8mg6v5daqxpgbv7mf63fa76i1iv2wvak4";
};
pythonOptions = {
propagatedBuildInputs = [ artiq ]
++ (with pkgs.python3Packages; [ numpy pyqt5 pyqtgraph ]);
doCheck = false;
};
condaOptions = { dependencies = [ "python>=3.5" "artiq" "numpy" "pyqt" "pyqtgraph" ]; };
})
}

View File

@ -1,24 +0,0 @@
#!/usr/bin/env python
#
# Encodes data like ARTIQ build_soc.py ReprogrammableIdentifier
import sys
if len(sys.argv) != 2:
raise ValueError('argument missing')
identifier_str = sys.argv[1]
contents = list(identifier_str.encode())
l = len(contents)
if l > 255:
raise ValueError("Identifier string must be 255 characters or less")
contents.insert(0, l)
f = sys.stdout
f.write("[\n");
for i in range(7):
init = sum(1 << j if c & (1 << i) else 0 for j, c in enumerate(contents))
f.write(
' {{ cell = "identifier_str{}"; init = "256\'h{:X}"; }}\n'.format(i, init)
)
f.write("]\n");

View File

@ -3,18 +3,18 @@
rec {
wavedrom = pkgs.python3Packages.buildPythonPackage rec {
pname = "wavedrom";
version = "2.0.3.post2";
version = "0.1";
src = pkgs.python3Packages.fetchPypi {
inherit pname version;
sha256 = "13a4086417nv836s2wbj3f4r31gwapbyw5smgl00jsqizwsk96r3";
sha256 = "006w683zlmmwcw5xz1n5dwg34ims5jg3gl2700ql4wr0myjz6710";
};
buildInputs = [ pkgs.python3Packages.setuptools_scm ];
propagatedBuildInputs = with pkgs.python3Packages; [ svgwrite attrdict ];
doCheck = false;
meta = with pkgs.lib; {
meta = with pkgs.stdenv.lib; {
description = "WaveDrom compatible Python module and command line";
homepage = "https://pypi.org/project/wavedrom/";
license = licenses.mit;
@ -23,18 +23,18 @@ rec {
sphinxcontrib-wavedrom = pkgs.python3Packages.buildPythonPackage rec {
pname = "sphinxcontrib-wavedrom";
version = "2.1.1";
version = "2.0.0";
src = pkgs.python3Packages.fetchPypi {
inherit pname version;
sha256 = "09xq4csdcil2x8mm38yd5k6lfbkazicvm278xnzwbfc9vghkqqs2";
sha256 = "0nk36zqq5ipxqx9izz2iazb3iraasanv3nm05bjr21gw42zgkz22";
};
buildInputs = [ pkgs.python3Packages.setuptools_scm ];
propagatedBuildInputs = [ wavedrom ] ++ (with pkgs.python3Packages; [ sphinx xcffib cairosvg ]);
doCheck = false;
meta = with pkgs.lib; {
meta = with pkgs.stdenv.lib; {
description = "A Sphinx extension that allows including WaveDrom diagrams";
homepage = "https://pypi.org/project/sphinxcontrib-wavedrom/";
license = licenses.mit;

View File

@ -1,30 +0,0 @@
diff --git a/wand/tools.py b/wand/tools.py
index a51dabd..4d5a9d1 100644
--- a/wand/tools.py
+++ b/wand/tools.py
@@ -6,6 +6,7 @@ import shutil
import logging
from sipyco import pyon
+from artiq.appdirs import user_config_dir
import wand
logger = logging.getLogger(__name__)
@@ -26,10 +27,15 @@ class LockException(Exception):
pass
+def get_user_config_dir():
+ dir = user_config_dir("wand", "oitg", "1")
+ os.makedirs(dir, exist_ok=True)
+ return dir
+
+
def get_config_path(args, name_suffix=""):
config_file = "{}{}_config.pyon".format(args.name, name_suffix)
- wand_dir = os.path.dirname(wand.__file__)
- config_path = os.path.join(wand_dir, config_file)
+ config_path = os.path.join(get_user_config_dir(), config_file)
if args.backup_dir == "":
backup_path = ""

View File

@ -1,84 +0,0 @@
let
pkgs = import <nixpkgs> { overlays = [ (import ./artiq-fast/mozilla-overlay.nix) ]; };
artiq-zynq = import <artiq-zynq>;
artiq-fast = import <artiq-fast> { inherit pkgs; };
in
(
builtins.mapAttrs (key: value: pkgs.lib.hydraJob value) artiq-zynq
) // {
gateware-sim = pkgs.lib.hydraJob (pkgs.stdenv.mkDerivation {
name = "gateware-sim";
buildInputs = [ artiq-fast.migen artiq-fast.migen-axi artiq-fast.artiq ];
phases = [ "buildPhase" ];
buildPhase =
''
python -m unittest discover ${<artiq-zynq>}/src/gateware -v
touch $out
'';
});
zc706-hitl-tests = pkgs.lib.hydraJob (pkgs.stdenv.mkDerivation {
name = "zc706-hitl-tests";
__networked = true; # compatibility with old patched Nix
# breaks hydra, https://github.com/NixOS/hydra/issues/1216
#__impure = true; # Nix 2.8+
buildInputs = [
pkgs.netcat pkgs.openssh pkgs.rsync artiq-fast.artiq artiq-fast.artiq-netboot
];
phases = [ "buildPhase" ];
buildPhase =
''
export NIX_SSHOPTS="-F /dev/null -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o LogLevel=ERROR -i /opt/hydra_id_ed25519"
LOCKCTL=$(mktemp -d)
mkfifo $LOCKCTL/lockctl
cat $LOCKCTL/lockctl | ${pkgs.openssh}/bin/ssh \
$NIX_SSHOPTS \
rpi-4 \
'mkdir -p /tmp/board_lock && flock /tmp/board_lock/zc706-1 -c "echo Ok; cat"' \
| (
# End remote flock via FIFO
atexit_unlock() {
echo > $LOCKCTL/lockctl
}
trap atexit_unlock EXIT
# Read "Ok" line when remote successfully locked
read LOCK_OK
echo Power cycling board...
(echo b; sleep 5; echo B; sleep 5) | nc -N -w6 192.168.1.31 3131
echo Power cycle done.
export USER=hydra
export OPENOCD_ZYNQ=${artiq-zynq.zynq-rs}/openocd
export SZL=${(import artiq-zynq.zynq-rs).zc706-szl}/szl.elf
pushd ${<artiq-zynq>}
bash ${<artiq-zynq>}/remote_run.sh -h rpi-4 -o "$NIX_SSHOPTS" -d ${artiq-zynq.zc706-nist_qc2-jtag}
popd
echo Waiting for the firmware to boot...
sleep 15
echo Running test kernel...
artiq_run --device-db ${<artiq-zynq>}/examples/device_db.py ${<artiq-zynq>}/examples/mandelbrot.py
echo Running ARTIQ unit tests...
export ARTIQ_ROOT=${<artiq-zynq>}/examples
export ARTIQ_LOW_LATENCY=1
python -m unittest discover artiq.test.coredevice -v
touch $out
echo Completed
(echo b; sleep 5) | nc -N -w6 192.168.1.31 3131
echo Board powered off
)
'';
});
}

View File

@ -1,15 +1,14 @@
{ pkgs ? import <nixpkgs> {} }:
let
artiqpkgs = import ../artiq-fast/pkgs/python-deps.nix { inherit (pkgs) lib fetchgit fetchFromGitHub python3Packages; misoc-new = true; };
artiqpkgs = import ../artiq-fast/pkgs/python-deps.nix { inherit (pkgs) stdenv fetchFromGitHub python3Packages; };
ise = import ./ise.nix { inherit pkgs; };
vivado = import ../artiq-fast/vivado.nix { inherit pkgs; };
buildUrukulCpld = {version, src}: pkgs.stdenv.mkDerivation {
pname = "urukul-cpld";
inherit src version;
name = "urukul-cpld-${version}";
inherit src;
buildInputs = [(pkgs.python3.withPackages(ps: [artiqpkgs.migen]))] ++ (builtins.attrValues ise);
phases = ["buildPhase" "installPhase"];
buildPhase = "python $src/urukul_impl.py";
installPhase =
installPhase =
''
mkdir -p $out $out/nix-support
cp build/urukul.jed $out
@ -17,12 +16,12 @@ let
'';
};
buildMirnyCpld = {version, src}: pkgs.stdenv.mkDerivation {
pname = "mirny-cpld";
inherit src version;
name = "mirny-cpld-${version}";
inherit src;
buildInputs = [(pkgs.python3.withPackages(ps: [artiqpkgs.migen]))] ++ (builtins.attrValues ise);
phases = ["buildPhase" "installPhase"];
buildPhase = "python $src/mirny_impl.py";
installPhase =
installPhase =
''
mkdir -p $out $out/nix-support
cp build/mirny.jed $out
@ -44,15 +43,6 @@ in
sha256 = "1962jpzqzn22cwkcmfnvwqlj5i89pljhgfk64n6pk73clir9mp0w";
};
};
urukul-cpld-legacy = buildUrukulCpld rec {
version = "1.3.1";
src = pkgs.fetchFromGitHub {
owner = "quartiq";
repo = "urukul";
rev = "v${version}";
sha256 = "1nvarspqbf9f7b27j34jkkh4mj6rwrlmccmfpz5nnzk3h2j6zbqc";
};
};
mirny-cpld-master = buildMirnyCpld {
version = "master";
src = <mirnySrc>;
@ -79,27 +69,4 @@ in
echo file binary-dist $out/fastino.bin >> $out/nix-support/hydra-build-products
'';
};
phaser-fpga = pkgs.stdenv.mkDerivation {
name = "phaser-fpga";
src = <phaserSrc>;
patchPhase = ''
substituteInPlace phaser.py \
--replace "source ../load.tcl" \
""
'';
buildInputs = [ (pkgs.python3.withPackages(ps: [ artiqpkgs.migen artiqpkgs.misoc ])) ] ++ [ vivado ];
buildPhase = "python phaser.py";
installPhase =
''
mkdir -p $out $out/nix-support
cp build/phaser.bit $out
echo file binary-dist $out/phaser.bit >> $out/nix-support/hydra-build-products
'';
dontFixup = true;
doCheck = true;
checkInputs = [ pkgs.python3Packages.pytest ];
checkPhase = "pytest";
};
}

Some files were not shown because too many files have changed in this diff.