Compare commits
2 commits
master
...
try-packag
Author | SHA1 | Date | |
---|---|---|---|
![]() |
268673f04b | ||
![]() |
adaefcb7b4 |
68
docker/nix/default.nix
Normal file
68
docker/nix/default.nix
Normal file
|
@ -0,0 +1,68 @@
|
||||||
|
{ dockerTools
|
||||||
|
, bashInteractive
|
||||||
|
, cacert
|
||||||
|
, coreutils
|
||||||
|
, curl
|
||||||
|
, gitReallyMinimal
|
||||||
|
, gnutar
|
||||||
|
, gzip
|
||||||
|
, iana-etc
|
||||||
|
, nix
|
||||||
|
, openssh
|
||||||
|
, xz
|
||||||
|
, fromImage
|
||||||
|
, extraContents ? [ ]
|
||||||
|
, extraEnv ? [ ]
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
image = dockerTools.buildImageWithNixDb {
|
||||||
|
inherit (nix) name;
|
||||||
|
inherit fromImage;
|
||||||
|
|
||||||
|
contents = [
|
||||||
|
./root
|
||||||
|
coreutils
|
||||||
|
# add /bin/sh
|
||||||
|
bashInteractive
|
||||||
|
nix
|
||||||
|
|
||||||
|
# runtime dependencies of nix
|
||||||
|
cacert
|
||||||
|
gitReallyMinimal
|
||||||
|
gnutar
|
||||||
|
gzip
|
||||||
|
openssh
|
||||||
|
xz
|
||||||
|
|
||||||
|
# for haskell binaries
|
||||||
|
iana-etc
|
||||||
|
] ++ extraContents;
|
||||||
|
|
||||||
|
extraCommands = ''
|
||||||
|
# for /usr/bin/env
|
||||||
|
mkdir usr
|
||||||
|
ln -s ../bin usr/bin
|
||||||
|
|
||||||
|
# make sure /tmp exists
|
||||||
|
mkdir -m 1777 tmp
|
||||||
|
|
||||||
|
# need a HOME
|
||||||
|
mkdir -vp root
|
||||||
|
'';
|
||||||
|
|
||||||
|
config = {
|
||||||
|
Cmd = [ "/bin/bash" ];
|
||||||
|
Env = [
|
||||||
|
"ENV=/etc/profile.d/nix.sh"
|
||||||
|
"BASH_ENV=/etc/profile.d/nix.sh"
|
||||||
|
"NIX_BUILD_SHELL=/bin/bash"
|
||||||
|
"NIX_PATH=nixpkgs=${./fake_nixpkgs}"
|
||||||
|
"PAGER=cat"
|
||||||
|
"PATH=/usr/bin:/bin"
|
||||||
|
"SSL_CERT_FILE=${cacert}/etc/ssl/certs/ca-bundle.crt"
|
||||||
|
"USER=root"
|
||||||
|
] ++ extraEnv;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
in
|
||||||
|
image // { meta = nix.meta // image.meta; }
|
10
docker/nix/fake_nixpkgs/default.nix
Normal file
10
docker/nix/fake_nixpkgs/default.nix
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
_:
|
||||||
|
throw ''
|
||||||
|
This container doesn't include nixpkgs.
|
||||||
|
|
||||||
|
The best way to work around that is to pin your dependencies. See
|
||||||
|
https://nix.dev/tutorials/first-steps/towards-reproducibility-pinning-nixpkgs.html
|
||||||
|
|
||||||
|
Or if you must, override the NIX_PATH environment variable with eg:
|
||||||
|
"NIX_PATH=nixpkgs=channel:nixos-unstable"
|
||||||
|
''
|
21
docker/nix/root/etc/group
Normal file
21
docker/nix/root/etc/group
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
root:x:0:
|
||||||
|
wheel:x:1:
|
||||||
|
kmem:x:2:
|
||||||
|
tty:x:3:
|
||||||
|
messagebus:x:4:
|
||||||
|
disk:x:6:
|
||||||
|
audio:x:17:
|
||||||
|
floppy:x:18:
|
||||||
|
uucp:x:19:
|
||||||
|
lp:x:20:
|
||||||
|
cdrom:x:24:
|
||||||
|
tape:x:25:
|
||||||
|
video:x:26:
|
||||||
|
dialout:x:27:
|
||||||
|
utmp:x:29:
|
||||||
|
adm:x:55:
|
||||||
|
keys:x:96:
|
||||||
|
users:x:100:
|
||||||
|
input:x:174:
|
||||||
|
nixbld:x:30000:nixbld1,nixbld10,nixbld11,nixbld12,nixbld13,nixbld14,nixbld15,nixbld16,nixbld17,nixbld18,nixbld19,nixbld2,nixbld20,nixbld21,nixbld22,nixbld23,nixbld24,nixbld25,nixbld26,nixbld27,nixbld28,nixbld29,nixbld3,nixbld30,nixbld31,nixbld32,nixbld4,nixbld5,nixbld6,nixbld7,nixbld8,nixbld9
|
||||||
|
nogroup:x:65534:
|
11
docker/nix/root/etc/nsswitch.conf
Normal file
11
docker/nix/root/etc/nsswitch.conf
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
passwd: files mymachines systemd
|
||||||
|
group: files mymachines systemd
|
||||||
|
shadow: files
|
||||||
|
|
||||||
|
hosts: files mymachines dns myhostname
|
||||||
|
networks: files
|
||||||
|
|
||||||
|
ethers: files
|
||||||
|
services: files
|
||||||
|
protocols: files
|
||||||
|
rpc: files
|
34
docker/nix/root/etc/passwd
Normal file
34
docker/nix/root/etc/passwd
Normal file
|
@ -0,0 +1,34 @@
|
||||||
|
root:x:0:0:System administrator:/root:/bin/bash
|
||||||
|
nixbld1:x:30001:30000:Nix build user 1:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld2:x:30002:30000:Nix build user 2:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld3:x:30003:30000:Nix build user 3:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld4:x:30004:30000:Nix build user 4:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld5:x:30005:30000:Nix build user 5:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld6:x:30006:30000:Nix build user 6:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld7:x:30007:30000:Nix build user 7:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld8:x:30008:30000:Nix build user 8:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld9:x:30009:30000:Nix build user 9:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld10:x:30010:30000:Nix build user 10:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld11:x:30011:30000:Nix build user 11:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld12:x:30012:30000:Nix build user 12:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld13:x:30013:30000:Nix build user 13:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld14:x:30014:30000:Nix build user 14:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld15:x:30015:30000:Nix build user 15:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld16:x:30016:30000:Nix build user 16:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld17:x:30017:30000:Nix build user 17:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld18:x:30018:30000:Nix build user 18:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld19:x:30019:30000:Nix build user 19:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld20:x:30020:30000:Nix build user 20:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld21:x:30021:30000:Nix build user 21:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld22:x:30022:30000:Nix build user 22:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld23:x:30023:30000:Nix build user 23:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld24:x:30024:30000:Nix build user 24:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld25:x:30025:30000:Nix build user 25:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld26:x:30026:30000:Nix build user 26:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld27:x:30027:30000:Nix build user 27:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld28:x:30028:30000:Nix build user 28:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld29:x:30029:30000:Nix build user 29:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld30:x:30030:30000:Nix build user 30:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld31:x:30031:30000:Nix build user 31:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nixbld32:x:30032:30000:Nix build user 32:/var/empty:/run/current-system/sw/bin/nologin
|
||||||
|
nobody:x:65534:65534:Unprivileged account (don't use!):/var/empty:/run/current-system/sw/bin/nologin
|
|
@ -18,11 +18,11 @@
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1727634051,
|
"lastModified": 1730200266,
|
||||||
"narHash": "sha256-S5kVU7U82LfpEukbn/ihcyNt2+EvG7Z5unsKW9H/yFA=",
|
"narHash": "sha256-l253w0XMT8nWHGXuXqyiIC/bMvh1VRszGXgdpQlfhvU=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "06cf0e1da4208d3766d898b7fdab6513366d45b9",
|
"rev": "807e9154dcb16384b1b765ebe9cd2bba2ac287fd",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
|
|
@ -42,6 +42,10 @@
|
||||||
};
|
};
|
||||||
umd = callPackage ./pkgs/umd { };
|
umd = callPackage ./pkgs/umd { };
|
||||||
metal = callPackage ./pkgs/metal { };
|
metal = callPackage ./pkgs/metal { };
|
||||||
|
buda-docker = callPackage ./pkgs/buda-docker { };
|
||||||
|
# Will be difficult to get these 2 working and keep them working
|
||||||
|
buda-prebuilt = callPackage ./pkgs/buda-prebuilt { };
|
||||||
|
buda = callPackage ./pkgs/buda { };
|
||||||
|
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
@ -61,6 +65,9 @@
|
||||||
flash = scope.flash;
|
flash = scope.flash;
|
||||||
umd = scope.umd;
|
umd = scope.umd;
|
||||||
metal = scope.metal;
|
metal = scope.metal;
|
||||||
|
buda-docker = scope.buda-docker;
|
||||||
|
buda-prebuilt = scope.buda-prebuilt;
|
||||||
|
buda = scope.buda;
|
||||||
default = self.packages.${system}.smi;
|
default = self.packages.${system}.smi;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
41
pkgs/buda-docker/default.nix
Normal file
41
pkgs/buda-docker/default.nix
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
{
|
||||||
|
dockerTools,
|
||||||
|
callPackage,
|
||||||
|
git,
|
||||||
|
}:
|
||||||
|
|
||||||
|
let
|
||||||
|
budaDocker = dockerTools.pullImage {
|
||||||
|
imageName = "ghcr.io/tenstorrent/tt-buda/ubuntu-22-04-amd64/wh_b0";
|
||||||
|
imageDigest = "sha256:3a6f84ed355c8738432737f6498745c4bee235b871e97608394e29e396ff6925";
|
||||||
|
sha256 = "1vx7v9yx211dibshzgzz9zwm9xgkfj25iabplff19hx687w0n3sz";
|
||||||
|
finalImageName = "ghcr.io/tenstorrent/tt-buda/ubuntu-22-04-amd64/wh_b0";
|
||||||
|
finalImageTag = "v0.19.3";
|
||||||
|
};
|
||||||
|
|
||||||
|
#nixDocker = dockerTools.pullImage {
|
||||||
|
# imageName = "nixpkgs/nix-flakes";
|
||||||
|
# imageDigest = "sha256:cab18b64d25e4bc30415758d6e2f6bc05ecf6ae576092c0cf407b1cebb1ea0e5";
|
||||||
|
# sha256 = "0v4npm2h4z0k3y0h75zsk3q589vhris76g4vg5gkjlfbg16c822j";
|
||||||
|
# finalImageName = "nixpkgs/nix-flakes";
|
||||||
|
# finalImageTag = "latest";
|
||||||
|
#};
|
||||||
|
|
||||||
|
nixDocker = callPackage ../../docker/nix/default.nix {
|
||||||
|
fromImage = budaDocker;
|
||||||
|
|
||||||
|
# gitMinimal still ships with perl and python
|
||||||
|
gitReallyMinimal =
|
||||||
|
(git.override {
|
||||||
|
perlSupport = false;
|
||||||
|
pythonSupport = false;
|
||||||
|
withManual = false;
|
||||||
|
withpcre2 = false;
|
||||||
|
}).overrideAttrs
|
||||||
|
(_: {
|
||||||
|
# installCheck is broken when perl is disabled
|
||||||
|
doInstallCheck = false;
|
||||||
|
});
|
||||||
|
};
|
||||||
|
in
|
||||||
|
budaDocker
|
181
pkgs/buda-prebuilt/default.nix
Normal file
181
pkgs/buda-prebuilt/default.nix
Normal file
|
@ -0,0 +1,181 @@
|
||||||
|
{
|
||||||
|
lib,
|
||||||
|
python310Packages,
|
||||||
|
fetchzip,
|
||||||
|
stdenv,
|
||||||
|
callPackage,
|
||||||
|
__splicedPackages,
|
||||||
|
darwin,
|
||||||
|
runCommand,
|
||||||
|
}:
|
||||||
|
|
||||||
|
let
|
||||||
|
python3Packages = python310Packages;
|
||||||
|
pkgs = __splicedPackages;
|
||||||
|
|
||||||
|
boost_1_74 = (callPackage ./vendored/boost/default.nix { }).boost174;
|
||||||
|
yaml-cpp_0_6 = callPackage ./vendored/libyaml-cpp.nix { };
|
||||||
|
|
||||||
|
prebuilt-buda = fetchzip {
|
||||||
|
url = "https://github.com/tenstorrent/tt-buda/releases/download/v0.19.3/pybuda-wh.b0-v0.19.3-ubuntu-22-04-amd64-python3.10.zip";
|
||||||
|
hash = "sha256-M9sgFKSmWra+BglEWgrfFPJRS+UIVKUG+ZF1oTPVexg=";
|
||||||
|
stripRoot = false;
|
||||||
|
};
|
||||||
|
|
||||||
|
pipInstallHook' = python3Packages.callPackage (
|
||||||
|
{ makePythonHook, pip }:
|
||||||
|
makePythonHook {
|
||||||
|
name = "pip-install-hook";
|
||||||
|
propagatedBuildInputs = [ pip ];
|
||||||
|
substitutions = {
|
||||||
|
pythonInterpreter = python3Packages.python.interpreter;
|
||||||
|
pythonSitePackages = python3Packages.python.sitePackages;
|
||||||
|
};
|
||||||
|
} ./vendored/pip-install-hook.sh
|
||||||
|
) { };
|
||||||
|
|
||||||
|
nukeReferences = callPackage ./vendored/nuke-references.nix {
|
||||||
|
inherit (darwin) signingUtils;
|
||||||
|
};
|
||||||
|
|
||||||
|
autoPatchelfHook = callPackage (
|
||||||
|
{ makeSetupHook, bintools }:
|
||||||
|
makeSetupHook {
|
||||||
|
name = "auto-patchelf-hook";
|
||||||
|
propagatedBuildInputs = [
|
||||||
|
bintools
|
||||||
|
];
|
||||||
|
substitutions = {
|
||||||
|
pythonInterpreter = "${python3Packages.python.withPackages (ps: [ ps.pyelftools ])}/bin/python";
|
||||||
|
autoPatchelfScript = ./vendored/auto-patchelf.py;
|
||||||
|
};
|
||||||
|
} ./auto-patchelf.sh
|
||||||
|
) { };
|
||||||
|
|
||||||
|
tt-buda = stdenv.mkDerivation rec {
|
||||||
|
pname = "tt-buda";
|
||||||
|
version = "0.19.3";
|
||||||
|
format = "wheel";
|
||||||
|
|
||||||
|
src = prebuilt-buda;
|
||||||
|
|
||||||
|
nativeBuildInputs = [
|
||||||
|
pipInstallHook'
|
||||||
|
nukeReferences
|
||||||
|
];
|
||||||
|
|
||||||
|
preInstall = ''
|
||||||
|
mkdir dist
|
||||||
|
mv *.whl dist/
|
||||||
|
'';
|
||||||
|
|
||||||
|
postInstall = ''
|
||||||
|
find $out -name "__pycache__" -type d | xargs rm -rf
|
||||||
|
|
||||||
|
find $out/bin/ -type f -not -name 'debuda' -print0 | xargs -0 rm --
|
||||||
|
substituteInPlace $out/bin/debuda \
|
||||||
|
--replace-fail "${python3Packages.python.interpreter}" "/usr/bin/env python3"
|
||||||
|
|
||||||
|
# error: illegal path references in fixed-output derivation
|
||||||
|
find $out -print0 | xargs -0 nuke-refs
|
||||||
|
|
||||||
|
'';
|
||||||
|
|
||||||
|
dontPatchShebangs = true;
|
||||||
|
dontFixup = true;
|
||||||
|
|
||||||
|
outputHash = "sha256-eSU10kgIQzJ0kv6gmQwMCdVw0uBpohVyYqkjK4RU2ng=";
|
||||||
|
outputHashAlgo = "sha256";
|
||||||
|
outputHashMode = "recursive";
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
description = "Tenstorrent TT-BUDA Repository";
|
||||||
|
homepage = "https://github.com/tenstorrent/tt-buda";
|
||||||
|
license = lib.licenses.asl20;
|
||||||
|
mainProgram = "tt-buda";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
tt-buda-final = python3Packages.toPythonModule (
|
||||||
|
stdenv.mkDerivation (finalAttrs: {
|
||||||
|
pname = "tt-buda-final";
|
||||||
|
inherit (tt-buda) version;
|
||||||
|
|
||||||
|
nativeBuildInputs = [
|
||||||
|
autoPatchelfHook
|
||||||
|
python3Packages.pythonImportsCheckHook
|
||||||
|
];
|
||||||
|
|
||||||
|
buildInputs =
|
||||||
|
with pkgs;
|
||||||
|
[
|
||||||
|
stdenv.cc.libc.libgcc
|
||||||
|
stdenv.cc.libc.libgcc.lib
|
||||||
|
libpng
|
||||||
|
python3Packages.python
|
||||||
|
ncurses
|
||||||
|
expat
|
||||||
|
hwloc
|
||||||
|
zeromq
|
||||||
|
libjpeg8
|
||||||
|
glib
|
||||||
|
libGL
|
||||||
|
boost_1_74
|
||||||
|
yaml-cpp_0_6
|
||||||
|
]
|
||||||
|
++ (with pkgs.xorg; [
|
||||||
|
libxcb
|
||||||
|
libXext
|
||||||
|
libX11
|
||||||
|
libSM
|
||||||
|
libICE
|
||||||
|
]);
|
||||||
|
|
||||||
|
#runtimeDependencies = [
|
||||||
|
# # from torch._C import * # noqa: F403
|
||||||
|
# # ImportError: libstdc++.so.6: cannot open shared object file: No such file or directory
|
||||||
|
# stdenv.cc.libc.libgcc.lib
|
||||||
|
|
||||||
|
#];
|
||||||
|
|
||||||
|
#pythonImportsCheck = [
|
||||||
|
# "pybuda"
|
||||||
|
# "torch"
|
||||||
|
#];
|
||||||
|
|
||||||
|
passthru = {
|
||||||
|
inherit tt-buda yaml-cpp_0_6 boost_1_74;
|
||||||
|
pythonWith = python3Packages.python.withPackages (ps: [ finalAttrs.finalPackage ]);
|
||||||
|
|
||||||
|
tests = {
|
||||||
|
integrationTest =
|
||||||
|
runCommand "tt-buda-tests-integration-test"
|
||||||
|
{
|
||||||
|
strictDeps = true;
|
||||||
|
nativeBuildInputs = [
|
||||||
|
finalAttrs.passthru.pythonWith
|
||||||
|
stdenv.cc.libc.libgcc.lib
|
||||||
|
];
|
||||||
|
LD_LIBRARY_PATH = lib.makeLibraryPath [ stdenv.cc.libc.libgcc.lib ];
|
||||||
|
}
|
||||||
|
''
|
||||||
|
export HOME=$(mktemp -d)
|
||||||
|
python3 "${./test.py}"
|
||||||
|
touch "$out"
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
dontUnpack = true;
|
||||||
|
installPhase = ''
|
||||||
|
runHook preInstall
|
||||||
|
mkdir -p $out
|
||||||
|
cp -r ${tt-buda}/* $out
|
||||||
|
runHook postInstall
|
||||||
|
|
||||||
|
|
||||||
|
'';
|
||||||
|
})
|
||||||
|
);
|
||||||
|
in
|
||||||
|
tt-buda-final
|
26
pkgs/buda-prebuilt/test.py
Normal file
26
pkgs/buda-prebuilt/test.py
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
import pybuda
|
||||||
|
import torch
|
||||||
|
|
||||||
|
|
||||||
|
# Sample PyTorch module
|
||||||
|
class PyTorchTestModule(torch.nn.Module):
|
||||||
|
def __init__(self):
|
||||||
|
super().__init__()
|
||||||
|
self.weights1 = torch.nn.Parameter(torch.rand(32, 32), requires_grad=True)
|
||||||
|
self.weights2 = torch.nn.Parameter(torch.rand(32, 32), requires_grad=True)
|
||||||
|
def forward(self, act1, act2):
|
||||||
|
m1 = torch.matmul(act1, self.weights1)
|
||||||
|
m2 = torch.matmul(act2, self.weights2)
|
||||||
|
return m1 + m2, m1
|
||||||
|
|
||||||
|
|
||||||
|
def test_module_direct_pytorch():
|
||||||
|
input1 = torch.rand(4, 32, 32)
|
||||||
|
input2 = torch.rand(4, 32, 32)
|
||||||
|
# Run single inference pass on a PyTorch module, using a wrapper to convert to PyBUDA first
|
||||||
|
output = pybuda.PyTorchModule("direct_pt", PyTorchTestModule()).run(input1, input2)
|
||||||
|
print(output)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
test_module_direct_pytorch()
|
439
pkgs/buda-prebuilt/vendored/auto-patchelf.py
Normal file
439
pkgs/buda-prebuilt/vendored/auto-patchelf.py
Normal file
|
@ -0,0 +1,439 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
import pprint
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
from fnmatch import fnmatch
|
||||||
|
from collections import defaultdict
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from itertools import chain
|
||||||
|
from pathlib import Path, PurePath
|
||||||
|
from typing import DefaultDict, Generator, Iterator, Optional
|
||||||
|
|
||||||
|
from elftools.common.exceptions import ELFError # type: ignore
|
||||||
|
from elftools.elf.dynamic import DynamicSection # type: ignore
|
||||||
|
from elftools.elf.sections import NoteSection # type: ignore
|
||||||
|
from elftools.elf.elffile import ELFFile # type: ignore
|
||||||
|
from elftools.elf.enums import ENUM_E_TYPE, ENUM_EI_OSABI # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def open_elf(path: Path) -> Iterator[ELFFile]:
|
||||||
|
with path.open('rb') as stream:
|
||||||
|
yield ELFFile(stream)
|
||||||
|
|
||||||
|
|
||||||
|
def is_static_executable(elf: ELFFile) -> bool:
|
||||||
|
# Statically linked executables have an ELF type of EXEC but no INTERP.
|
||||||
|
return (elf.header["e_type"] == 'ET_EXEC'
|
||||||
|
and not elf.get_section_by_name(".interp"))
|
||||||
|
|
||||||
|
|
||||||
|
def is_dynamic_executable(elf: ELFFile) -> bool:
|
||||||
|
# We do not require an ELF type of EXEC. This also catches
|
||||||
|
# position-independent executables, as they typically have an INTERP
|
||||||
|
# section but their ELF type is DYN.
|
||||||
|
return bool(elf.get_section_by_name(".interp"))
|
||||||
|
|
||||||
|
|
||||||
|
def get_dependencies(elf: ELFFile) -> list[list[Path]]:
|
||||||
|
dependencies = []
|
||||||
|
# This convoluted code is here on purpose. For some reason, using
|
||||||
|
# elf.get_section_by_name(".dynamic") does not always return an
|
||||||
|
# instance of DynamicSection, but that is required to call iter_tags
|
||||||
|
for section in elf.iter_sections():
|
||||||
|
if isinstance(section, DynamicSection):
|
||||||
|
for tag in section.iter_tags('DT_NEEDED'):
|
||||||
|
dependencies.append([Path(tag.needed)])
|
||||||
|
break # There is only one dynamic section
|
||||||
|
|
||||||
|
return dependencies
|
||||||
|
|
||||||
|
|
||||||
|
def get_dlopen_dependencies(elf: ELFFile) -> list[list[Path]]:
|
||||||
|
"""
|
||||||
|
Extracts dependencies from the `.note.dlopen` section.
|
||||||
|
This is a FreeDesktop standard to annotate binaries with libraries that it may `dlopen`.
|
||||||
|
See https://systemd.io/ELF_DLOPEN_METADATA/
|
||||||
|
"""
|
||||||
|
dependencies = []
|
||||||
|
for section in elf.iter_sections():
|
||||||
|
if not isinstance(section, NoteSection) or section.name != ".note.dlopen":
|
||||||
|
continue
|
||||||
|
for note in section.iter_notes():
|
||||||
|
if note["n_type"] != 0x407C0C0A or note["n_name"] != "FDO":
|
||||||
|
continue
|
||||||
|
note_desc = note["n_desc"]
|
||||||
|
text = note_desc.decode("utf-8").rstrip("\0")
|
||||||
|
j = json.loads(text)
|
||||||
|
for d in j:
|
||||||
|
dependencies.append([Path(soname) for soname in d["soname"]])
|
||||||
|
return dependencies
|
||||||
|
|
||||||
|
|
||||||
|
def get_rpath(elf: ELFFile) -> list[str]:
|
||||||
|
# This convoluted code is here on purpose. For some reason, using
|
||||||
|
# elf.get_section_by_name(".dynamic") does not always return an
|
||||||
|
# instance of DynamicSection, but that is required to call iter_tags
|
||||||
|
for section in elf.iter_sections():
|
||||||
|
if isinstance(section, DynamicSection):
|
||||||
|
for tag in section.iter_tags('DT_RUNPATH'):
|
||||||
|
return tag.runpath.split(':')
|
||||||
|
|
||||||
|
for tag in section.iter_tags('DT_RPATH'):
|
||||||
|
return tag.rpath.split(':')
|
||||||
|
|
||||||
|
break # There is only one dynamic section
|
||||||
|
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
def get_arch(elf: ELFFile) -> str:
|
||||||
|
return elf.get_machine_arch()
|
||||||
|
|
||||||
|
|
||||||
|
def get_osabi(elf: ELFFile) -> str:
|
||||||
|
return elf.header["e_ident"]["EI_OSABI"]
|
||||||
|
|
||||||
|
|
||||||
|
def osabi_are_compatible(wanted: str, got: str) -> bool:
|
||||||
|
"""
|
||||||
|
Tests whether two OS ABIs are compatible, taking into account the
|
||||||
|
generally accepted compatibility of SVR4 ABI with other ABIs.
|
||||||
|
"""
|
||||||
|
if not wanted or not got:
|
||||||
|
# One of the types couldn't be detected, so as a fallback we'll
|
||||||
|
# assume they're compatible.
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Generally speaking, the base ABI (0x00), which is represented by
|
||||||
|
# readelf(1) as "UNIX - System V", indicates broad compatibility
|
||||||
|
# with other ABIs.
|
||||||
|
#
|
||||||
|
# TODO: This isn't always true. For example, some OSes embed ABI
|
||||||
|
# compatibility into SHT_NOTE sections like .note.tag and
|
||||||
|
# .note.ABI-tag. It would be prudent to add these to the detection
|
||||||
|
# logic to produce better ABI information.
|
||||||
|
if wanted == 'ELFOSABI_SYSV':
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Similarly here, we should be able to link against a superset of
|
||||||
|
# features, so even if the target has another ABI, this should be
|
||||||
|
# fine.
|
||||||
|
if got == 'ELFOSABI_SYSV':
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Otherwise, we simply return whether the ABIs are identical.
|
||||||
|
return wanted == got
|
||||||
|
|
||||||
|
|
||||||
|
def glob(path: Path, pattern: str, recursive: bool) -> Iterator[Path]:
|
||||||
|
if path.is_dir():
|
||||||
|
return path.rglob(pattern) if recursive else path.glob(pattern)
|
||||||
|
else:
|
||||||
|
# path.glob won't return anything if the path is not a directory.
|
||||||
|
# We extend that behavior by matching the file name against the pattern.
|
||||||
|
# This allows to pass single files instead of dirs to auto_patchelf,
|
||||||
|
# for greater control on the files to consider.
|
||||||
|
return [path] if path.match(pattern) else []
|
||||||
|
|
||||||
|
|
||||||
|
cached_paths: set[Path] = set()
|
||||||
|
soname_cache: DefaultDict[tuple[str, str], list[tuple[Path, str]]] = defaultdict(list)
|
||||||
|
|
||||||
|
|
||||||
|
def populate_cache(initial: list[Path], recursive: bool =False) -> None:
|
||||||
|
lib_dirs = list(initial)
|
||||||
|
|
||||||
|
while lib_dirs:
|
||||||
|
lib_dir = lib_dirs.pop(0)
|
||||||
|
|
||||||
|
if lib_dir in cached_paths:
|
||||||
|
continue
|
||||||
|
|
||||||
|
cached_paths.add(lib_dir)
|
||||||
|
|
||||||
|
for path in glob(lib_dir, "*.so*", recursive):
|
||||||
|
if not path.is_file():
|
||||||
|
continue
|
||||||
|
|
||||||
|
# As an optimisation, resolve the symlinks here, as the target is unique
|
||||||
|
# XXX: (layus, 2022-07-25) is this really an optimisation in all cases ?
|
||||||
|
# It could make the rpath bigger or break the fragile precedence of $out.
|
||||||
|
resolved = path.resolve()
|
||||||
|
# Do not use resolved paths when names do not match
|
||||||
|
if resolved.name != path.name:
|
||||||
|
resolved = path
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open_elf(path) as elf:
|
||||||
|
osabi = get_osabi(elf)
|
||||||
|
arch = get_arch(elf)
|
||||||
|
rpath = [Path(p) for p in get_rpath(elf)
|
||||||
|
if p and '$ORIGIN' not in p]
|
||||||
|
lib_dirs += rpath
|
||||||
|
soname_cache[(path.name, arch)].append((resolved.parent, osabi))
|
||||||
|
|
||||||
|
except ELFError:
|
||||||
|
# Not an ELF file in the right format
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def find_dependency(soname: str, soarch: str, soabi: str) -> Optional[Path]:
|
||||||
|
for lib, libabi in soname_cache[(soname, soarch)]:
|
||||||
|
if osabi_are_compatible(soabi, libabi):
|
||||||
|
return lib
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Dependency:
|
||||||
|
file: Path # The file that contains the dependency
|
||||||
|
name: Path # The name of the dependency
|
||||||
|
found: bool = False # Whether it was found somewhere
|
||||||
|
|
||||||
|
|
||||||
|
def auto_patchelf_file(path: Path, runtime_deps: list[Path], append_rpaths: list[Path] = [], keep_libc: bool = False, extra_args: list[str] = []) -> list[Dependency]:
|
||||||
|
try:
|
||||||
|
with open_elf(path) as elf:
|
||||||
|
|
||||||
|
if is_static_executable(elf):
|
||||||
|
# No point patching these
|
||||||
|
print(f"skipping {path} because it is statically linked")
|
||||||
|
return []
|
||||||
|
|
||||||
|
if elf.num_segments() == 0:
|
||||||
|
# no segment (e.g. object file)
|
||||||
|
print(f"skipping {path} because it contains no segment")
|
||||||
|
return []
|
||||||
|
|
||||||
|
file_arch = get_arch(elf)
|
||||||
|
if interpreter_arch != file_arch:
|
||||||
|
# Our target architecture is different than this file's
|
||||||
|
# architecture, so skip it.
|
||||||
|
print(f"skipping {path} because its architecture ({file_arch})"
|
||||||
|
f" differs from target ({interpreter_arch})")
|
||||||
|
return []
|
||||||
|
|
||||||
|
file_osabi = get_osabi(elf)
|
||||||
|
if not osabi_are_compatible(interpreter_osabi, file_osabi):
|
||||||
|
print(f"skipping {path} because its OS ABI ({file_osabi}) is"
|
||||||
|
f" not compatible with target ({interpreter_osabi})")
|
||||||
|
return []
|
||||||
|
|
||||||
|
file_is_dynamic_executable = is_dynamic_executable(elf)
|
||||||
|
|
||||||
|
file_dependencies = get_dependencies(elf) + get_dlopen_dependencies(elf)
|
||||||
|
|
||||||
|
except ELFError:
|
||||||
|
return []
|
||||||
|
|
||||||
|
rpath = []
|
||||||
|
if file_is_dynamic_executable:
|
||||||
|
print("setting interpreter of", path)
|
||||||
|
subprocess.run(
|
||||||
|
["patchelf", "--set-interpreter", interpreter_path.as_posix(), path.as_posix()] + extra_args,
|
||||||
|
check=True)
|
||||||
|
rpath += runtime_deps
|
||||||
|
|
||||||
|
print("searching for dependencies of", path)
|
||||||
|
dependencies = []
|
||||||
|
# Be sure to get the output of all missing dependencies instead of
|
||||||
|
# failing at the first one, because it's more useful when working
|
||||||
|
# on a new package where you don't yet know the dependencies.
|
||||||
|
for dep in file_dependencies:
|
||||||
|
was_found = False
|
||||||
|
for candidate in dep:
|
||||||
|
|
||||||
|
# This loop determines which candidate for a given
|
||||||
|
# dependency can be found, and how. There may be multiple
|
||||||
|
# candidates for a dep because of '.note.dlopen'
|
||||||
|
# dependencies.
|
||||||
|
#
|
||||||
|
# 1. If a candidate is an absolute path, it is already a
|
||||||
|
# valid dependency if that path exists, and nothing needs
|
||||||
|
# to be done. It should be an error if that path does not exist.
|
||||||
|
# 2. If a candidate is found within libc, it should be dropped
|
||||||
|
# and resolved automatically by the dynamic linker, unless
|
||||||
|
# keep_libc is enabled.
|
||||||
|
# 3. If a candidate is found in our library dependencies, that
|
||||||
|
# dependency should be added to rpath.
|
||||||
|
# 4. If all of the above fail, libc dependencies should still be
|
||||||
|
# considered found. This is in contrast to step 2, because
|
||||||
|
# enabling keep_libc should allow libc to be found in step 3
|
||||||
|
# if possible to preserve its presence in rpath.
|
||||||
|
#
|
||||||
|
# These conditions are checked in this order, because #2
|
||||||
|
# and #3 may both be true. In that case, we still want to
|
||||||
|
# add the dependency to rpath, as the original binary
|
||||||
|
# presumably had it and this should be preserved.
|
||||||
|
|
||||||
|
is_libc = (libc_lib / candidate).is_file()
|
||||||
|
|
||||||
|
if candidate.is_absolute() and candidate.is_file():
|
||||||
|
was_found = True
|
||||||
|
break
|
||||||
|
elif is_libc and not keep_libc:
|
||||||
|
was_found = True
|
||||||
|
break
|
||||||
|
elif found_dependency := find_dependency(candidate.name, file_arch, file_osabi):
|
||||||
|
rpath.append(found_dependency)
|
||||||
|
dependencies.append(Dependency(path, candidate, found=True))
|
||||||
|
print(f" {candidate} -> found: {found_dependency}")
|
||||||
|
was_found = True
|
||||||
|
break
|
||||||
|
elif is_libc and keep_libc:
|
||||||
|
was_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not was_found:
|
||||||
|
dep_name = dep[0] if len(dep) == 1 else f"any({', '.join(map(str, dep))})"
|
||||||
|
dependencies.append(Dependency(path, dep_name, found=False))
|
||||||
|
print(f" {dep_name} -> not found!")
|
||||||
|
|
||||||
|
rpath.extend(append_rpaths)
|
||||||
|
|
||||||
|
# Dedup the rpath
|
||||||
|
rpath_str = ":".join(dict.fromkeys(map(Path.as_posix, rpath)))
|
||||||
|
|
||||||
|
if rpath:
|
||||||
|
print("setting RPATH to:", rpath_str)
|
||||||
|
subprocess.run(
|
||||||
|
["patchelf", "--set-rpath", rpath_str, path.as_posix()] + extra_args,
|
||||||
|
check=True)
|
||||||
|
|
||||||
|
return dependencies
|
||||||
|
|
||||||
|
|
||||||
|
def auto_patchelf(
|
||||||
|
paths_to_patch: list[Path],
|
||||||
|
lib_dirs: list[Path],
|
||||||
|
runtime_deps: list[Path],
|
||||||
|
recursive: bool = True,
|
||||||
|
ignore_missing: list[str] = [],
|
||||||
|
append_rpaths: list[Path] = [],
|
||||||
|
keep_libc: bool = False,
|
||||||
|
extra_args: list[str] = []) -> None:
|
||||||
|
|
||||||
|
if not paths_to_patch:
|
||||||
|
sys.exit("No paths to patch, stopping.")
|
||||||
|
|
||||||
|
# Add all shared objects of the current output path to the cache,
|
||||||
|
# before lib_dirs, so that they are chosen first in find_dependency.
|
||||||
|
populate_cache(paths_to_patch, recursive)
|
||||||
|
populate_cache(lib_dirs)
|
||||||
|
|
||||||
|
dependencies = []
|
||||||
|
for path in chain.from_iterable(glob(p, '*', recursive) for p in paths_to_patch):
|
||||||
|
if not path.is_symlink() and path.is_file():
|
||||||
|
dependencies += auto_patchelf_file(path, runtime_deps, append_rpaths, keep_libc, extra_args)
|
||||||
|
|
||||||
|
missing = [dep for dep in dependencies if not dep.found]
|
||||||
|
|
||||||
|
# Print a summary of the missing dependencies at the end
|
||||||
|
print(f"auto-patchelf: {len(missing)} dependencies could not be satisfied")
|
||||||
|
failure = False
|
||||||
|
for dep in missing:
|
||||||
|
for pattern in ignore_missing:
|
||||||
|
if fnmatch(dep.name.name, pattern):
|
||||||
|
print(f"warn: auto-patchelf ignoring missing {dep.name} wanted by {dep.file}")
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
print(f"error: auto-patchelf could not satisfy dependency {dep.name} wanted by {dep.file}")
|
||||||
|
failure = True
|
||||||
|
|
||||||
|
if failure:
|
||||||
|
sys.exit('auto-patchelf failed to find all the required dependencies.\n'
|
||||||
|
'Add the missing dependencies to --libs or use '
|
||||||
|
'`--ignore-missing="foo.so.1 bar.so etc.so"`.')
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Entry point: parse command-line options and invoke auto_patchelf.

    Every list-valued option is given ``default=[]`` because argparse
    yields ``None`` for an unpassed ``nargs='*'`` (or ``nargs='...'``)
    option; ``None`` would crash auto_patchelf when it iterates
    ``ignore_missing`` or concatenates ``extra_args``.
    """
    parser = argparse.ArgumentParser(
        prog="auto-patchelf",
        description='auto-patchelf tries as hard as possible to patch the'
                    ' provided binary files by looking for compatible'
                    ' libraries in the provided paths.')
    parser.add_argument(
        "--ignore-missing",
        nargs="*",
        type=str,
        default=[],
        help="Do not fail when some dependencies are not found.")
    parser.add_argument(
        "--no-recurse",
        dest="recursive",
        action="store_false",
        help="Disable the recursive traversal of paths to patch.")
    parser.add_argument(
        "--paths", nargs="*", type=Path, default=[],
        help="Paths whose content needs to be patched."
             " Single files and directories are accepted."
             " Directories are traversed recursively by default.")
    parser.add_argument(
        "--libs", nargs="*", type=Path, default=[],
        help="Paths where libraries are searched for."
             " Single files and directories are accepted."
             " Directories are not searched recursively.")
    parser.add_argument(
        "--runtime-dependencies", nargs="*", type=Path, default=[],
        help="Paths to prepend to the runtime path of executable binaries."
             " Subject to deduplication, which may imply some reordering.")
    parser.add_argument(
        "--append-rpaths",
        nargs="*",
        type=Path,
        default=[],
        help="Paths to append to all runtime paths unconditionally",
    )
    parser.add_argument(
        "--keep-libc",
        dest="keep_libc",
        action="store_true",
        help="Attempt to search for and relink libc dependencies.",
    )
    parser.add_argument(
        "--extra-args",
        # Undocumented Python argparse feature: consume all remaining arguments
        # as values for this one. This means this argument should always be passed
        # last.
        nargs="...",
        type=str,
        default=[],
        help="Extra arguments to pass to patchelf. This argument should always come last."
    )

    print("automatically fixing dependencies for ELF files")
    args = parser.parse_args()
    # Echo the effective configuration into the build log for debugging.
    pprint.pprint(vars(args))

    auto_patchelf(
        args.paths,
        args.libs,
        args.runtime_dependencies,
        args.recursive,
        args.ignore_missing,
        append_rpaths=args.append_rpaths,
        keep_libc=args.keep_libc,
        extra_args=args.extra_args)
||||||
|
|
||||||
|
# Globals describing the target platform's dynamic linker; they are filled
# in by the __main__ block below from $NIX_BINTOOLS.  They are declared with
# non-Optional types (hence the `type: ignore` on the None initializers)
# because the rest of the script only runs once they have been populated.
interpreter_path: Path = None # type: ignore
interpreter_osabi: str = None # type: ignore
interpreter_arch: str = None # type: ignore
libc_lib: Path = None # type: ignore
|
||||||
|
if __name__ == "__main__":
    # Locate the dynamic linker and libc of the target platform from the
    # nix-support metadata provided by the bintools wrapper (raises KeyError
    # if NIX_BINTOOLS is not set, i.e. outside a Nix build environment).
    nix_support = Path(os.environ['NIX_BINTOOLS']) / 'nix-support'
    interpreter_path = Path((nix_support / 'dynamic-linker').read_text().strip())
    libc_lib = Path((nix_support / 'orig-libc').read_text().strip()) / 'lib'

    # Read the OS ABI and architecture out of the interpreter's own ELF
    # header so candidate libraries can later be checked for compatibility.
    with open_elf(interpreter_path) as interpreter:
        interpreter_osabi = get_osabi(interpreter)
        interpreter_arch = get_arch(interpreter)

    if interpreter_arch and interpreter_osabi and interpreter_path and libc_lib:
        main()
    else:
        sys.exit("Failed to parse dynamic linker (ld) properties.")
20
pkgs/buda-prebuilt/vendored/auto-patchelf.sh
Normal file
20
pkgs/buda-prebuilt/vendored/auto-patchelf.sh
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
# This setup hook calls patchelf to automatically remove unneeded
# directories from the RPATH of every library or executable in every
# output.

# Run patchELF over $prefix at fixup time unless the derivation opted out
# by setting dontPatchELF.
fixupOutputHooks+=('if [ -z "${dontPatchELF-}" ]; then patchELF "$prefix"; fi')

# patchELF DIR - shrink the RPATH of every ELF file found under DIR.
patchELF() {
    local dir="$1"
    # Nothing to do if this output directory was never created.
    [ -e "$dir" ] || return 0

    echo "shrinking RPATHs of ELF executables and libraries in $dir"

    local i
    # NUL-delimited find so file names containing whitespace survive the loop.
    while IFS= read -r -d $'\0' i; do
        # Skip .build-id trees: they contain debug artifacts, not real binaries.
        if [[ "$i" =~ .build-id ]]; then continue; fi
        # isELF is provided by the stdenv setup; skip anything that is not ELF.
        if ! isELF "$i"; then continue; fi
        echo "shrinking $i"
        # `|| true`: patchelf may fail on unusual binaries; this is best-effort.
        patchelf --shrink-rpath "$i" || true
    done < <(find "$dir" -type f -print0)
}
15
pkgs/buda-prebuilt/vendored/boost/1.74.nix
Normal file
15
pkgs/buda-prebuilt/vendored/boost/1.74.nix
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
# Boost 1.74.x: pins version and source tarball for the generic boost
# builder in ./generic.nix; all other arguments are passed through.
{ callPackage, fetchurl, fetchpatch, ... } @ args:

callPackage ./generic.nix (args // rec {
  version = "1.74.0";

  src = fetchurl {
    # Try the SourceForge mirror first, then the official boost artifactory.
    urls = [
      "mirror://sourceforge/boost/boost_${builtins.replaceStrings ["."] ["_"] version}.tar.bz2"
      "https://boostorg.jfrog.io/artifactory/main/release/${version}/source/boost_${builtins.replaceStrings ["."] ["_"] version}.tar.bz2"
    ];
    # SHA256 from http://www.boost.org/users/history/version_1_74_0.html
    sha256 = "83bfc1507731a0906e387fc28b7ef5417d591429e51e788417fe9ff025e116b1";
  };
})
22
pkgs/buda-prebuilt/vendored/boost/cmake-paths-173.patch
Normal file
22
pkgs/buda-prebuilt/vendored/boost/cmake-paths-173.patch
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
diff --git a/tools/boost_install/boost-install.jam b/tools/boost_install/boost-install.jam
|
||||||
|
index 4238f921e..8fc1ea269 100644
|
||||||
|
--- a/tools/boost_install/boost-install.jam
|
||||||
|
+++ b/tools/boost_install/boost-install.jam
|
||||||
|
@@ -649,7 +649,7 @@ rule generate-cmake-config- ( target : sources * : properties * )
|
||||||
|
""
|
||||||
|
"# Compute the include and library directories relative to this file."
|
||||||
|
""
|
||||||
|
- "get_filename_component(_BOOST_CMAKEDIR \"${CMAKE_CURRENT_LIST_DIR}/../\" REALPATH)"
|
||||||
|
+ "get_filename_component(_BOOST_REAL_CMAKEDIR \"${CMAKE_CURRENT_LIST_DIR}/../\" REALPATH)"
|
||||||
|
: true ;
|
||||||
|
|
||||||
|
if [ path.is-rooted $(cmakedir) ]
|
||||||
|
@@ -668,6 +668,8 @@ rule generate-cmake-config- ( target : sources * : properties * )
|
||||||
|
" unset(_BOOST_CMAKEDIR_ORIGINAL)"
|
||||||
|
"endif()"
|
||||||
|
""
|
||||||
|
+ "# Assume that the installer actually did know where the libs were to be installed"
|
||||||
|
+ "get_filename_component(_BOOST_CMAKEDIR \"$(cmakedir-native)\" REALPATH)"
|
||||||
|
: true ;
|
||||||
|
}
|
||||||
|
|
21
pkgs/buda-prebuilt/vendored/boost/cmake-paths.patch
Normal file
21
pkgs/buda-prebuilt/vendored/boost/cmake-paths.patch
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
diff --git a/tools/boost_install/boost-install.jam b/tools/boost_install/boost-install.jam
|
||||||
|
index ad19f7b55..ec6bf57ff 100644
|
||||||
|
--- a/tools/boost_install/boost-install.jam
|
||||||
|
+++ b/tools/boost_install/boost-install.jam
|
||||||
|
@@ -587,6 +587,7 @@ rule generate-cmake-config- ( target : sources * : properties * )
|
||||||
|
"# Compute the include and library directories relative to this file."
|
||||||
|
""
|
||||||
|
"get_filename_component(_BOOST_CMAKEDIR \"${CMAKE_CURRENT_LIST_DIR}/../\" ABSOLUTE)"
|
||||||
|
+ "get_filename_component(_BOOST_REAL_CMAKEDIR \"${CMAKE_CURRENT_LIST_DIR}/../\" ABSOLUTE)"
|
||||||
|
: true ;
|
||||||
|
|
||||||
|
if [ path.is-rooted $(cmakedir) ]
|
||||||
|
@@ -607,6 +608,8 @@ rule generate-cmake-config- ( target : sources * : properties * )
|
||||||
|
" unset(_BOOST_CMAKEDIR_ORIGINAL)"
|
||||||
|
"endif()"
|
||||||
|
""
|
||||||
|
+ "# Assume that the installer actually did know where the libs were to be installed"
|
||||||
|
+ "get_filename_component(_BOOST_CMAKEDIR \"$(cmakedir-native)\" REALPATH)"
|
||||||
|
: true ;
|
||||||
|
}
|
||||||
|
|
|
@ -0,0 +1,45 @@
|
||||||
|
diff --git a/tools/build/src/tools/python.jam b/tools/build/src/tools/python.jam
|
||||||
|
index 273b28a..2d2031e 100644
|
||||||
|
--- a/tools/build/src/tools/python.jam
|
||||||
|
+++ b/tools/build/src/tools/python.jam
|
||||||
|
@@ -428,13 +428,7 @@ local rule windows-installed-pythons ( version ? )
|
||||||
|
|
||||||
|
local rule darwin-installed-pythons ( version ? )
|
||||||
|
{
|
||||||
|
- version ?= $(.version-countdown) ;
|
||||||
|
-
|
||||||
|
- local prefix
|
||||||
|
- = [ GLOB /System/Library/Frameworks /Library/Frameworks
|
||||||
|
- : Python.framework ] ;
|
||||||
|
-
|
||||||
|
- return $(prefix)/Versions/$(version)/bin/python ;
|
||||||
|
+ return ;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@@ -890,25 +884,6 @@ local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? :
|
||||||
|
|
||||||
|
# See if we can find a framework directory on darwin.
|
||||||
|
local framework-directory ;
|
||||||
|
- if $(target-os) = darwin
|
||||||
|
- {
|
||||||
|
- # Search upward for the framework directory.
|
||||||
|
- local framework-directory = $(libraries[-1]) ;
|
||||||
|
- while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework
|
||||||
|
- {
|
||||||
|
- framework-directory = $(framework-directory:D) ;
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- if $(framework-directory:D=) = Python.framework
|
||||||
|
- {
|
||||||
|
- debug-message framework directory is \"$(framework-directory)\" ;
|
||||||
|
- }
|
||||||
|
- else
|
||||||
|
- {
|
||||||
|
- debug-message "no framework directory found; using library path" ;
|
||||||
|
- framework-directory = ;
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
|
||||||
|
local dll-path = $(libraries) ;
|
||||||
|
|
20
pkgs/buda-prebuilt/vendored/boost/default.nix
Normal file
20
pkgs/buda-prebuilt/vendored/boost/default.nix
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
{ lib
, callPackage
, boost-build
, fetchurl
}:

let
  # makeBoost FILE: instantiate a versioned boost expression (e.g. ./1.74.nix)
  # with a boost-build (b2) that matches the boost being built.  lib.fix lets
  # the derivation refer to itself as `self`, so b2 can be built from the
  # very same src/version it is about to compile.
  makeBoost = file:
    lib.fix (self:
      callPackage file {
        boost-build = boost-build.override {
          # useBoost allows us passing in src and version from
          # the derivation we are building to get a matching b2 version.
          useBoost = self;
        };
      }
    );
in {
  boost174 = makeBoost ./1.74.nix;
}
249
pkgs/buda-prebuilt/vendored/boost/generic.nix
Normal file
249
pkgs/buda-prebuilt/vendored/boost/generic.nix
Normal file
|
@ -0,0 +1,249 @@
|
||||||
|
{ lib, stdenv, icu, expat, zlib, bzip2, zstd, xz, python ? null, fixDarwinDylibNames, libiconv, libxcrypt
|
||||||
|
, boost-build
|
||||||
|
, fetchpatch
|
||||||
|
, which
|
||||||
|
, toolset ? /**/ if stdenv.cc.isClang then "clang"
|
||||||
|
else if stdenv.cc.isGNU then "gcc"
|
||||||
|
else null
|
||||||
|
, enableRelease ? true
|
||||||
|
, enableDebug ? false
|
||||||
|
, enableSingleThreaded ? false
|
||||||
|
, enableMultiThreaded ? true
|
||||||
|
, enableShared ? !(with stdenv.hostPlatform; isStatic || libc == "msvcrt") # problems for now
|
||||||
|
, enableStatic ? !enableShared
|
||||||
|
, enablePython ? false
|
||||||
|
, enableNumpy ? false
|
||||||
|
, enableIcu ? stdenv.hostPlatform == stdenv.buildPlatform
|
||||||
|
, taggedLayout ? ((enableRelease && enableDebug) || (enableSingleThreaded && enableMultiThreaded) || (enableShared && enableStatic))
|
||||||
|
, patches ? []
|
||||||
|
, boostBuildPatches ? []
|
||||||
|
, useMpi ? false
|
||||||
|
, mpi
|
||||||
|
, extraB2Args ? []
|
||||||
|
|
||||||
|
# Attributes inherit from specific versions
|
||||||
|
, version, src
|
||||||
|
, ...
|
||||||
|
}:
|
||||||
|
|
||||||
|
# We must build at least one type of libraries
|
||||||
|
assert enableShared || enableStatic;
|
||||||
|
|
||||||
|
assert enableNumpy -> enablePython;
|
||||||
|
|
||||||
|
# Boost <1.69 can't be built on linux with clang >8, because pth was removed
|
||||||
|
assert with lib; (stdenv.isLinux && toolset == "clang" && versionAtLeast stdenv.cc.version "8.0.0") -> versionAtLeast version "1.69";
|
||||||
|
|
||||||
|
let
|
||||||
|
|
||||||
|
variant = lib.concatStringsSep ","
|
||||||
|
(lib.optional enableRelease "release" ++
|
||||||
|
lib.optional enableDebug "debug");
|
||||||
|
|
||||||
|
threading = lib.concatStringsSep ","
|
||||||
|
(lib.optional enableSingleThreaded "single" ++
|
||||||
|
lib.optional enableMultiThreaded "multi");
|
||||||
|
|
||||||
|
link = lib.concatStringsSep ","
|
||||||
|
(lib.optional enableShared "shared" ++
|
||||||
|
lib.optional enableStatic "static");
|
||||||
|
|
||||||
|
runtime-link = if enableShared then "shared" else "static";
|
||||||
|
|
||||||
|
# To avoid library name collisions
|
||||||
|
layout = if taggedLayout then "tagged" else "system";
|
||||||
|
|
||||||
|
needUserConfig = stdenv.hostPlatform != stdenv.buildPlatform || useMpi || (stdenv.isDarwin && enableShared);
|
||||||
|
|
||||||
|
b2Args = lib.concatStringsSep " " ([
|
||||||
|
"--includedir=$dev/include"
|
||||||
|
"--libdir=$out/lib"
|
||||||
|
"-j$NIX_BUILD_CORES"
|
||||||
|
"--layout=${layout}"
|
||||||
|
"variant=${variant}"
|
||||||
|
"threading=${threading}"
|
||||||
|
"link=${link}"
|
||||||
|
"-sEXPAT_INCLUDE=${expat.dev}/include"
|
||||||
|
"-sEXPAT_LIBPATH=${expat.out}/lib"
|
||||||
|
|
||||||
|
# TODO: make this unconditional
|
||||||
|
] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform ||
|
||||||
|
# required on mips; see 61d9f201baeef4c4bb91ad8a8f5f89b747e0dfe4
|
||||||
|
(stdenv.hostPlatform.isMips && lib.versionAtLeast version "1.79")) [
|
||||||
|
"address-model=${toString stdenv.hostPlatform.parsed.cpu.bits}"
|
||||||
|
"architecture=${if stdenv.hostPlatform.isMips64
|
||||||
|
then if lib.versionOlder version "1.78" then "mips1" else "mips"
|
||||||
|
else if stdenv.hostPlatform.parsed.cpu.name == "s390x" then "s390x"
|
||||||
|
else toString stdenv.hostPlatform.parsed.cpu.family}"
|
||||||
|
"binary-format=${toString stdenv.hostPlatform.parsed.kernel.execFormat.name}"
|
||||||
|
"target-os=${toString stdenv.hostPlatform.parsed.kernel.name}"
|
||||||
|
|
||||||
|
# adapted from table in boost manual
|
||||||
|
# https://www.boost.org/doc/libs/1_66_0/libs/context/doc/html/context/architectures.html
|
||||||
|
"abi=${if stdenv.hostPlatform.parsed.cpu.family == "arm" then "aapcs"
|
||||||
|
else if stdenv.hostPlatform.isWindows then "ms"
|
||||||
|
else if stdenv.hostPlatform.isMips32 then "o32"
|
||||||
|
else if stdenv.hostPlatform.isMips64n64 then "n64"
|
||||||
|
else "sysv"}"
|
||||||
|
] ++ lib.optional (link != "static") "runtime-link=${runtime-link}"
|
||||||
|
++ lib.optional (variant == "release") "debug-symbols=off"
|
||||||
|
++ lib.optional (toolset != null) "toolset=${toolset}"
|
||||||
|
++ lib.optional (!enablePython) "--without-python"
|
||||||
|
++ lib.optional needUserConfig "--user-config=user-config.jam"
|
||||||
|
++ lib.optional (stdenv.buildPlatform.isDarwin && stdenv.hostPlatform.isLinux) "pch=off"
|
||||||
|
++ lib.optionals (stdenv.hostPlatform.libc == "msvcrt") [
|
||||||
|
"threadapi=win32"
|
||||||
|
] ++ extraB2Args
|
||||||
|
);
|
||||||
|
|
||||||
|
in
|
||||||
|
|
||||||
|
stdenv.mkDerivation {
|
||||||
|
pname = "boost";
|
||||||
|
|
||||||
|
inherit src version;
|
||||||
|
|
||||||
|
patchFlags = [];
|
||||||
|
|
||||||
|
patches = patches
|
||||||
|
++ lib.optional stdenv.isDarwin ./darwin-no-system-python.patch
|
||||||
|
# Fix boost-context segmentation faults on ppc64 due to ABI violation
|
||||||
|
++ lib.optional (lib.versionOlder version "1.71") (fetchpatch {
|
||||||
|
url = "https://github.com/boostorg/context/commit/2354eca9b776a6739112833f64754108cc0d1dc5.patch";
|
||||||
|
sha256 = "067m4bjpmcanqvg28djax9a10avmdwhlpfx6gn73kbqqq70dnz29";
|
||||||
|
stripLen = 1;
|
||||||
|
extraPrefix = "libs/context/";
|
||||||
|
})
|
||||||
|
++ lib.optional (lib.versionOlder version "1.70") (fetchpatch {
|
||||||
|
# support for Mips64n64 appeared in boost-context 1.70
|
||||||
|
url = "https://github.com/boostorg/context/commit/e3f744a1862164062d579d1972272d67bdaa9c39.patch";
|
||||||
|
sha256 = "sha256-qjQy1b4jDsIRrI+UYtcguhvChrMbGWO0UlEzEJHYzRI=";
|
||||||
|
stripLen = 1;
|
||||||
|
extraPrefix = "libs/context/";
|
||||||
|
})
|
||||||
|
++ lib.optional (lib.versionAtLeast version "1.70" && lib.versionOlder version "1.73") ./cmake-paths.patch
|
||||||
|
++ lib.optional (lib.versionAtLeast version "1.73") ./cmake-paths-173.patch
|
||||||
|
++ lib.optional (version == "1.77.0") (fetchpatch {
|
||||||
|
url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
|
||||||
|
relative = "include";
|
||||||
|
sha256 = "sha256-KlmIbixcds6GyKYt1fx5BxDIrU7msrgDdYo9Va/KJR4=";
|
||||||
|
});
|
||||||
|
|
||||||
|
meta = with lib; {
|
||||||
|
homepage = "http://boost.org/";
|
||||||
|
description = "Collection of C++ libraries";
|
||||||
|
license = licenses.boost;
|
||||||
|
platforms = platforms.unix ++ platforms.windows;
|
||||||
|
badPlatforms = optionals (versionOlder version "1.73") platforms.riscv;
|
||||||
|
maintainers = with maintainers; [ hjones2199 ];
|
||||||
|
|
||||||
|
broken =
|
||||||
|
# boost-context lacks support for the N32 ABI on mips64. The build
|
||||||
|
# will succeed, but packages depending on boost-context will fail with
|
||||||
|
# a very cryptic error message.
|
||||||
|
stdenv.hostPlatform.isMips64n32;
|
||||||
|
};
|
||||||
|
|
||||||
|
passthru = {
|
||||||
|
inherit boostBuildPatches;
|
||||||
|
};
|
||||||
|
|
||||||
|
preConfigure = lib.optionalString useMpi ''
|
||||||
|
cat << EOF >> user-config.jam
|
||||||
|
using mpi : ${mpi}/bin/mpiCC ;
|
||||||
|
EOF
|
||||||
|
''
|
||||||
|
# On darwin we need to add the `$out/lib` to the libraries' rpath explicitly,
|
||||||
|
# otherwise the dynamic linker is unable to resolve the reference to @rpath
|
||||||
|
# when the boost libraries want to load each other at runtime.
|
||||||
|
+ lib.optionalString (stdenv.isDarwin && enableShared) ''
|
||||||
|
cat << EOF >> user-config.jam
|
||||||
|
using clang-darwin : : ${stdenv.cc.targetPrefix}c++
|
||||||
|
: <linkflags>"-rpath $out/lib/"
|
||||||
|
;
|
||||||
|
EOF
|
||||||
|
''
|
||||||
|
# b2 has trouble finding the correct compiler and tools for cross compilation
|
||||||
|
# since it apparently ignores $CC, $AR etc. Thus we need to set everything
|
||||||
|
# in user-config.jam. To keep things simple we just set everything in an
|
||||||
|
# uniform way for clang and gcc (which works thanks to our cc-wrapper).
|
||||||
|
# We pass toolset later which will make b2 invoke everything in the right
|
||||||
|
# way -- the other toolset in user-config.jam will be ignored.
|
||||||
|
+ lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) ''
|
||||||
|
cat << EOF >> user-config.jam
|
||||||
|
using gcc : cross : ${stdenv.cc.targetPrefix}c++
|
||||||
|
: <archiver>$AR
|
||||||
|
<ranlib>$RANLIB
|
||||||
|
;
|
||||||
|
|
||||||
|
using clang : cross : ${stdenv.cc.targetPrefix}c++
|
||||||
|
: <archiver>$AR
|
||||||
|
<ranlib>$RANLIB
|
||||||
|
;
|
||||||
|
EOF
|
||||||
|
''
|
||||||
|
# b2 needs to be explicitly told how to find Python when cross-compiling
|
||||||
|
+ lib.optionalString enablePython ''
|
||||||
|
cat << EOF >> user-config.jam
|
||||||
|
using python : : ${python.interpreter}
|
||||||
|
: ${python}/include/python${python.pythonVersion}
|
||||||
|
: ${python}/lib
|
||||||
|
;
|
||||||
|
EOF
|
||||||
|
'';
|
||||||
|
|
||||||
|
NIX_CFLAGS_LINK = lib.optionalString stdenv.isDarwin
|
||||||
|
"-headerpad_max_install_names";
|
||||||
|
|
||||||
|
enableParallelBuilding = true;
|
||||||
|
|
||||||
|
nativeBuildInputs = [ which boost-build ]
|
||||||
|
++ lib.optional stdenv.hostPlatform.isDarwin fixDarwinDylibNames;
|
||||||
|
buildInputs = [ expat zlib bzip2 libiconv ]
|
||||||
|
++ lib.optional (lib.versionAtLeast version "1.69") zstd
|
||||||
|
++ [ xz ]
|
||||||
|
++ lib.optional enableIcu icu
|
||||||
|
++ lib.optionals enablePython [ libxcrypt python ]
|
||||||
|
++ lib.optional enableNumpy python.pkgs.numpy;
|
||||||
|
|
||||||
|
configureScript = "./bootstrap.sh";
|
||||||
|
configurePlatforms = [];
|
||||||
|
dontDisableStatic = true;
|
||||||
|
dontAddStaticConfigureFlags = true;
|
||||||
|
configureFlags = [
|
||||||
|
"--includedir=$(dev)/include"
|
||||||
|
"--libdir=$(out)/lib"
|
||||||
|
"--with-bjam=b2" # prevent bootstrapping b2 in configurePhase
|
||||||
|
] ++ lib.optional (toolset != null) "--with-toolset=${toolset}"
|
||||||
|
++ [ (if enableIcu then "--with-icu=${icu.dev}" else "--without-icu") ];
|
||||||
|
|
||||||
|
buildPhase = ''
|
||||||
|
runHook preBuild
|
||||||
|
b2 ${b2Args}
|
||||||
|
runHook postBuild
|
||||||
|
'';
|
||||||
|
|
||||||
|
installPhase = ''
|
||||||
|
runHook preInstall
|
||||||
|
|
||||||
|
# boostbook is needed by some applications
|
||||||
|
mkdir -p $dev/share/boostbook
|
||||||
|
cp -a tools/boostbook/{xsl,dtd} $dev/share/boostbook/
|
||||||
|
|
||||||
|
# Let boost install everything else
|
||||||
|
b2 ${b2Args} install
|
||||||
|
|
||||||
|
runHook postInstall
|
||||||
|
'';
|
||||||
|
|
||||||
|
postFixup = ''
|
||||||
|
# Make boost header paths relative so that they are not runtime dependencies
|
||||||
|
cd "$dev" && find include \( -name '*.hpp' -or -name '*.h' -or -name '*.ipp' \) \
|
||||||
|
-exec sed '1s/^\xef\xbb\xbf//;1i#line 1 "{}"' -i '{}' \;
|
||||||
|
'' + lib.optionalString (stdenv.hostPlatform.libc == "msvcrt") ''
|
||||||
|
$RANLIB "$out/lib/"*.a
|
||||||
|
'';
|
||||||
|
|
||||||
|
outputs = [ "out" "dev" ];
|
||||||
|
setOutputFlags = false;
|
||||||
|
}
|
|
@ -0,0 +1,15 @@
|
||||||
|
Taken from https://github.com/conan-io/conan-center-index/pull/361/files
|
||||||
|
|
||||||
|
diff --git a/include/boost/thread/pthread/thread_data.hpp b/include/boost/thread/pthread/thread_data.hpp
|
||||||
|
index aefbeb4..bc9b136 100644
|
||||||
|
--- a/boost/thread/pthread/thread_data.hpp
|
||||||
|
+++ b/boost/thread/pthread/thread_data.hpp
|
||||||
|
@@ -57,7 +57,7 @@ namespace boost
|
||||||
|
#else
|
||||||
|
std::size_t page_size = ::sysconf( _SC_PAGESIZE);
|
||||||
|
#endif
|
||||||
|
-#if PTHREAD_STACK_MIN > 0
|
||||||
|
+#ifdef PTHREAD_STACK_MIN
|
||||||
|
if (size<PTHREAD_STACK_MIN) size=PTHREAD_STACK_MIN;
|
||||||
|
#endif
|
||||||
|
size = ((size+page_size-1)/page_size)*page_size;
|
24
pkgs/buda-prebuilt/vendored/libyaml-cpp.nix
Normal file
24
pkgs/buda-prebuilt/vendored/libyaml-cpp.nix
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
# yaml-cpp 0.6.2, built as a shared library (vendored pin).
{ lib, stdenv, fetchFromGitHub, cmake }:

stdenv.mkDerivation rec {
  pname = "libyaml-cpp";
  version = "0.6.2";

  src = fetchFromGitHub {
    owner = "jbeder";
    repo = "yaml-cpp";
    rev = "yaml-cpp-${version}";
    sha256 = "16lclpa487yghf9019wymj419wkyx4795wv9q7539hhimajw9kpb";
  };

  # implement https://github.com/jbeder/yaml-cpp/commit/52a1378e48e15d42a0b755af7146394c6eff998c
  # i.e. make the shared/static choice follow BUILD_SHARED_LIBS instead of a
  # hard-coded OFF, so that the -DBUILD_SHARED_LIBS=ON flag below takes effect.
  postPatch = ''
    substituteInPlace CMakeLists.txt \
      --replace 'option(YAML_BUILD_SHARED_LIBS "Build Shared Libraries" OFF)' \
      'option(YAML_BUILD_SHARED_LIBS "Build yaml-cpp shared library" ''${BUILD_SHARED_LIBS})'
  '';

  nativeBuildInputs = [ cmake ];

  cmakeFlags = [ "-DBUILD_SHARED_LIBS=ON" "-DYAML_CPP_BUILD_TESTS=OFF" ];
}
43
pkgs/buda-prebuilt/vendored/nuke-references.nix
Normal file
43
pkgs/buda-prebuilt/vendored/nuke-references.nix
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
# The program `nuke-refs' created by this derivation replaces all
# references to the Nix store in the specified files by a non-existant
# path (/nix/store/eeee...). This is useful for getting rid of
# dependencies that you know are not actually needed at runtime.

{ lib, stdenvNoCC, perl, signingUtils, shell ? stdenvNoCC.shell }:

let
  stdenv = stdenvNoCC;

  # On aarch64-darwin, modified binaries must be re-signed afterwards.
  darwinCodeSign = stdenv.targetPlatform.isDarwin && stdenv.targetPlatform.isAarch64;
in

stdenvNoCC.mkDerivation {
  name = "nuke-references";

  strictDeps = true;
  enableParallelBuilding = true;
  # There is no source tree: the script is substituted straight into $out.
  dontUnpack = true;
  dontConfigure = true;
  dontBuild = true;

  installPhase = ''
    mkdir -p $out/bin
    # substituteAll fills in the @shell@/@perl@/@storeDir@/... placeholders
    # from the `env` attributes below.
    substituteAll ${./nuke-refs.sh} $out/bin/nuke-refs
    chmod a+x $out/bin/nuke-refs
  '';

  postFixup = lib.optionalString darwinCodeSign ''
    mkdir -p $out/nix-support
    substituteAll ${./darwin-sign-fixup.sh} $out/nix-support/setup-hooks.sh
  '';

  # FIXME: get rid of perl dependency.
  env = {
    inherit perl;
    inherit (builtins) storeDir;
    shell = lib.getBin shell + (shell.shellPath or "");
    signingUtils = lib.optionalString darwinCodeSign signingUtils;
  };

  meta.mainProgram = "nuke-refs";
}
34
pkgs/buda-prebuilt/vendored/nuke-refs.sh
Normal file
34
pkgs/buda-prebuilt/vendored/nuke-refs.sh
Normal file
|
@ -0,0 +1,34 @@
|
||||||
|
#! @shell@
# nuke-refs [-e STOREPATH]... FILE...
#
# Replace every Nix store reference in FILE(s) with the dummy store path
# /nix/store/eeee...- so the reference no longer registers as a runtime
# dependency.  -e excludes a given store path from being nuked (repeatable).
# The @...@ placeholders are filled in by substituteAll at build time.

fixupHooks=()

# Optional extra per-file hooks (e.g. darwin code re-signing) installed
# next to this script.
if [ -e @out@/nix-support/setup-hooks.sh ]; then
    source @out@/nix-support/setup-hooks.sh
fi

# Build a negative-lookahead regex fragment out of the -e exclusions.
excludes=""
while getopts e: o; do
    case "$o" in
        e) storeId=$(echo "$OPTARG" | @perl@/bin/perl -ne "print \"\$1\" if m|^\Q@storeDir@\E/([a-z0-9]{32})-.*|")
           if [ -z "$storeId" ]; then
               echo "-e argument must be a Nix store path"
               exit 1
           fi
           excludes="$excludes(?!$storeId)"
           ;;
    esac
done
shift $(($OPTIND-1))

for i in "$@"; do
    # Only rewrite regular files, never symlinks.
    if test ! -L "$i" -a -f "$i"; then
        # Rewrite every non-excluded store hash to the all-'e' dummy hash.
        cat "$i" | @perl@/bin/perl -pe "s|\Q@storeDir@\E/$excludes[a-z0-9]{32}-|@storeDir@/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-|g" > "$i.tmp"
        # Preserve the executable bit lost by writing through a redirection.
        if test -x "$i"; then chmod +x "$i.tmp"; fi
        cmp -s "$i" "$i.tmp" || echo "Changed file: \"$i\""
        mv "$i.tmp" "$i"

        # Let registered hooks (e.g. re-signing) post-process the file.
        for hook in "${fixupHooks[@]}"; do
            eval "$hook" "$i"
        done
    fi
done
24
pkgs/buda-prebuilt/vendored/pip-install-hook.sh
Normal file
24
pkgs/buda-prebuilt/vendored/pip-install-hook.sh
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
# Setup hook for pip.
# Provides pipInstallPhase, which installs the wheel(s) found in ./dist
# into $out, and registers it as the default installPhase unless the
# derivation opts out (dontUsePipInstall) or defines its own installPhase.
echo "Sourcing pip-install-hook"

# Extra flags a derivation may append to the `pip install` invocation.
declare -a pipInstallFlags

pipInstallPhase() {
    echo "Executing pipInstallPhase"
    runHook preInstall

    mkdir -p "$out/@pythonSitePackages@"
    # Make the freshly-installed packages importable for later hooks/checks.
    export PYTHONPATH="$out/@pythonSitePackages@:$PYTHONPATH"

    pushd dist || return 1
    # --prefix=$out installs into this output; --no-cache keeps the build pure.
    @pythonInterpreter@ -m pip install ./*.whl --no-warn-script-location --prefix="$out" --no-cache $pipInstallFlags
    popd || return 1

    runHook postInstall
    echo "Finished executing pipInstallPhase"
}

if [ -z "${dontUsePipInstall-}" ] && [ -z "${installPhase-}" ]; then
    echo "Using pipInstallPhase"
    installPhase=pipInstallPhase
fi
58
pkgs/buda/default.nix
Normal file
58
pkgs/buda/default.nix
Normal file
|
@ -0,0 +1,58 @@
|
||||||
|
{
  lib,
  python3Packages,
  fetchFromGitHub,
  git,
  cmake,
}:

python3Packages.buildPythonApplication rec {
  pname = "tt-buda";
  version = "0.19.3";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "tenstorrent";
    repo = "tt-buda";
    rev = "v${version}";
    #hash = "sha256-g5eB2roVh4t4fhM+t2QYm+3NXYM94hbwstWES2sL6hA=";
    hash = "sha256-a+yamtu93AypLAXa9cj3yQ1AcizEBbmDd8fa2RNjGcQ=";
    fetchSubmodules = true;
    # NOTE(review): leaveDotGit makes the fetched source non-reproducible;
    # presumably the build scripts query git for version info — verify.
    leaveDotGit = true;
  };

  build-system = [
    python3Packages.setuptools
    python3Packages.wheel
  ];

  nativeBuildInputs = [
    git
    cmake
  ] ++ python3Packages.pybind11.propagatedNativeBuildInputs;

  buildInputs = [
    python3Packages.python # pybind python.h
  ];

  # The vendored compile_flags files hard-code the FHS include path
  # /usr/include/python3.8, which does not exist inside the Nix sandbox.
  # Point them at the Python that is actually on the build inputs.
  # (The original replaced the string with itself, which was a no-op.)
  postPatch = ''
    substituteInPlace compile_flags.txt third_party/budabackend/compile_flags.txt \
      --replace-fail "-I/usr/include/python3.8" \
        "-I${python3Packages.python}/include/python${python3Packages.python.pythonVersion}"
  '';

  # The top-level build system drives cmake itself; keep the cmake setup
  # hook from running its own configure phase.
  dontUseCmakeConfigure = true;

  pythonImportsCheck = [
    "tt_buda"
  ];

  # Target Tenstorrent Wormhole B0 silicon.
  BACKEND_ARCH_NAME = "wormhole_b0";
  ARCH_NAME = "wormhole_b0";

  meta = {
    description = "Tenstorrent TT-BUDA Repository";
    homepage = "https://github.com/tenstorrent/tt-buda";
    license = lib.licenses.asl20;
    mainProgram = "tt-buda";
  };
}
Loading…
Reference in a new issue