Compare commits: try-packag...master (21 commits)

65d7bef16d
2245c9c5ea
f18aa5fee2
31f0cee50d
ba96c32479
24410845a0
8412c8bf22
c1b3fac31c
fa8e1be058
9abd0fb916
8f850c7c62
66bf540c3e
e5c7215b90
441f4dc464
1ae7313fad
be924e4600
31b3aa6fdc
d937e5c7fa
29dc8c01e2
bfee5bc0ff
c280cd3a8f
@ -57,3 +57,8 @@ Testing /dev/tenstorrent/1 @ 0000:76:00.0
 ```

 As far as I can tell, these failures are exercised by the tests, and a clean `stdout` means there is no issue.
+
+
+# TODO
+
+- [ ] Better update system
@ -1,68 +0,0 @@
{ dockerTools
, bashInteractive
, cacert
, coreutils
, curl
, gitReallyMinimal
, gnutar
, gzip
, iana-etc
, nix
, openssh
, xz
, fromImage
, extraContents ? [ ]
, extraEnv ? [ ]
}:
let
  image = dockerTools.buildImageWithNixDb {
    inherit (nix) name;
    inherit fromImage;

    contents = [
      ./root
      coreutils
      # add /bin/sh
      bashInteractive
      nix

      # runtime dependencies of nix
      cacert
      gitReallyMinimal
      gnutar
      gzip
      openssh
      xz

      # for haskell binaries
      iana-etc
    ] ++ extraContents;

    extraCommands = ''
      # for /usr/bin/env
      mkdir usr
      ln -s ../bin usr/bin

      # make sure /tmp exists
      mkdir -m 1777 tmp

      # need a HOME
      mkdir -vp root
    '';

    config = {
      Cmd = [ "/bin/bash" ];
      Env = [
        "ENV=/etc/profile.d/nix.sh"
        "BASH_ENV=/etc/profile.d/nix.sh"
        "NIX_BUILD_SHELL=/bin/bash"
        "NIX_PATH=nixpkgs=${./fake_nixpkgs}"
        "PAGER=cat"
        "PATH=/usr/bin:/bin"
        "SSL_CERT_FILE=${cacert}/etc/ssl/certs/ca-bundle.crt"
        "USER=root"
      ] ++ extraEnv;
    };
  };
in
image // { meta = nix.meta // image.meta; }
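For orientation, a minimal sketch of how this image builder could be instantiated. The file path, the `fromImage = null` base, and substituting `pkgs.gitMinimal` for `gitReallyMinimal` are illustrative assumptions; the actual call site is the `buda-docker` expression further down in this compare.

```nix
# Sketch only -- the argument choices here are assumptions, not taken from this compare.
{ pkgs ? import <nixpkgs> { } }:
pkgs.callPackage ./docker/nix/default.nix {
  # any dockerTools image can serve as the base layer; null means no base image
  fromImage = null;
  # the builder expects a slimmed-down git; plain gitMinimal works as a stand-in
  gitReallyMinimal = pkgs.gitMinimal;
}
```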
@ -1,10 +0,0 @@
_:
throw ''
  This container doesn't include nixpkgs.

  The best way to work around that is to pin your dependencies. See
  https://nix.dev/tutorials/first-steps/towards-reproducibility-pinning-nixpkgs.html

  Or if you must, override the NIX_PATH environment variable with eg:
  "NIX_PATH=nixpkgs=channel:nixos-unstable"
''
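The stub above exists only to fail with a pointer toward pinning. A hedged sketch of the pinning approach its message recommends (the rev and hash are placeholders, not values from this repository):

```nix
# Pinning sketch -- <pinned-rev> and the sha256 are placeholders to be filled in.
let
  nixpkgs = fetchTarball {
    url = "https://github.com/NixOS/nixpkgs/archive/<pinned-rev>.tar.gz";
    sha256 = "0000000000000000000000000000000000000000000000000000";
  };
  pkgs = import nixpkgs { };
in
pkgs.hello
```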
@ -1,21 +0,0 @@
root:x:0:
wheel:x:1:
kmem:x:2:
tty:x:3:
messagebus:x:4:
disk:x:6:
audio:x:17:
floppy:x:18:
uucp:x:19:
lp:x:20:
cdrom:x:24:
tape:x:25:
video:x:26:
dialout:x:27:
utmp:x:29:
adm:x:55:
keys:x:96:
users:x:100:
input:x:174:
nixbld:x:30000:nixbld1,nixbld10,nixbld11,nixbld12,nixbld13,nixbld14,nixbld15,nixbld16,nixbld17,nixbld18,nixbld19,nixbld2,nixbld20,nixbld21,nixbld22,nixbld23,nixbld24,nixbld25,nixbld26,nixbld27,nixbld28,nixbld29,nixbld3,nixbld30,nixbld31,nixbld32,nixbld4,nixbld5,nixbld6,nixbld7,nixbld8,nixbld9
nogroup:x:65534:
@ -1,11 +0,0 @@
passwd: files mymachines systemd
group: files mymachines systemd
shadow: files

hosts: files mymachines dns myhostname
networks: files

ethers: files
services: files
protocols: files
rpc: files
@ -1,34 +0,0 @@
root:x:0:0:System administrator:/root:/bin/bash
nixbld1:x:30001:30000:Nix build user 1:/var/empty:/run/current-system/sw/bin/nologin
nixbld2:x:30002:30000:Nix build user 2:/var/empty:/run/current-system/sw/bin/nologin
nixbld3:x:30003:30000:Nix build user 3:/var/empty:/run/current-system/sw/bin/nologin
nixbld4:x:30004:30000:Nix build user 4:/var/empty:/run/current-system/sw/bin/nologin
nixbld5:x:30005:30000:Nix build user 5:/var/empty:/run/current-system/sw/bin/nologin
nixbld6:x:30006:30000:Nix build user 6:/var/empty:/run/current-system/sw/bin/nologin
nixbld7:x:30007:30000:Nix build user 7:/var/empty:/run/current-system/sw/bin/nologin
nixbld8:x:30008:30000:Nix build user 8:/var/empty:/run/current-system/sw/bin/nologin
nixbld9:x:30009:30000:Nix build user 9:/var/empty:/run/current-system/sw/bin/nologin
nixbld10:x:30010:30000:Nix build user 10:/var/empty:/run/current-system/sw/bin/nologin
nixbld11:x:30011:30000:Nix build user 11:/var/empty:/run/current-system/sw/bin/nologin
nixbld12:x:30012:30000:Nix build user 12:/var/empty:/run/current-system/sw/bin/nologin
nixbld13:x:30013:30000:Nix build user 13:/var/empty:/run/current-system/sw/bin/nologin
nixbld14:x:30014:30000:Nix build user 14:/var/empty:/run/current-system/sw/bin/nologin
nixbld15:x:30015:30000:Nix build user 15:/var/empty:/run/current-system/sw/bin/nologin
nixbld16:x:30016:30000:Nix build user 16:/var/empty:/run/current-system/sw/bin/nologin
nixbld17:x:30017:30000:Nix build user 17:/var/empty:/run/current-system/sw/bin/nologin
nixbld18:x:30018:30000:Nix build user 18:/var/empty:/run/current-system/sw/bin/nologin
nixbld19:x:30019:30000:Nix build user 19:/var/empty:/run/current-system/sw/bin/nologin
nixbld20:x:30020:30000:Nix build user 20:/var/empty:/run/current-system/sw/bin/nologin
nixbld21:x:30021:30000:Nix build user 21:/var/empty:/run/current-system/sw/bin/nologin
nixbld22:x:30022:30000:Nix build user 22:/var/empty:/run/current-system/sw/bin/nologin
nixbld23:x:30023:30000:Nix build user 23:/var/empty:/run/current-system/sw/bin/nologin
nixbld24:x:30024:30000:Nix build user 24:/var/empty:/run/current-system/sw/bin/nologin
nixbld25:x:30025:30000:Nix build user 25:/var/empty:/run/current-system/sw/bin/nologin
nixbld26:x:30026:30000:Nix build user 26:/var/empty:/run/current-system/sw/bin/nologin
nixbld27:x:30027:30000:Nix build user 27:/var/empty:/run/current-system/sw/bin/nologin
nixbld28:x:30028:30000:Nix build user 28:/var/empty:/run/current-system/sw/bin/nologin
nixbld29:x:30029:30000:Nix build user 29:/var/empty:/run/current-system/sw/bin/nologin
nixbld30:x:30030:30000:Nix build user 30:/var/empty:/run/current-system/sw/bin/nologin
nixbld31:x:30031:30000:Nix build user 31:/var/empty:/run/current-system/sw/bin/nologin
nixbld32:x:30032:30000:Nix build user 32:/var/empty:/run/current-system/sw/bin/nologin
nobody:x:65534:65534:Unprivileged account (don't use!):/var/empty:/run/current-system/sw/bin/nologin
flake.lock — 18 changed lines

@ -3,11 +3,11 @@
 "flake-compat": {
 "flake": false,
 "locked": {
-"lastModified": 1696426674,
+"lastModified": 1733328505,
-"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
+"narHash": "sha256-NeCCThCEP3eCl2l/+27kNNK7QrwZB1IJCrXfrbv5oqU=",
 "owner": "edolstra",
 "repo": "flake-compat",
-"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
+"rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec",
 "type": "github"
 },
 "original": {

@ -18,11 +18,11 @@
 },
 "nixpkgs": {
 "locked": {
-"lastModified": 1730200266,
+"lastModified": 1734649271,
-"narHash": "sha256-l253w0XMT8nWHGXuXqyiIC/bMvh1VRszGXgdpQlfhvU=",
+"narHash": "sha256-4EVBRhOjMDuGtMaofAIqzJbg4Ql7Ai0PSeuVZTHjyKQ=",
 "owner": "NixOS",
 "repo": "nixpkgs",
-"rev": "807e9154dcb16384b1b765ebe9cd2bba2ac287fd",
+"rev": "d70bd19e0a38ad4790d3913bf08fcbfc9eeca507",
 "type": "github"
 },
 "original": {

@ -59,11 +59,11 @@
 "systems": "systems"
 },
 "locked": {
-"lastModified": 1726560853,
+"lastModified": 1731533236,
-"narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
+"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
 "owner": "numtide",
 "repo": "flake-utils",
-"rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
+"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
 "type": "github"
 },
 "original": {
flake.nix — 65 changed lines

@ -31,21 +31,14 @@
 {
 kmd = callPackages ./pkgs/kmd { inherit kernel; };
 sfpi = callPackages ./pkgs/sfpi { };
-luwen = callPackage ./pkgs/luwen { };
+luwen = (callPackage ./pkgs/luwen { }).luwen;
+pyluwen = (callPackage ./pkgs/luwen { }).pyluwen;
 tools-common = callPackage ./pkgs/tools-common { };
 system-tools = callPackage ./pkgs/system-tools { };
-flash = callPackage ./pkgs/flash {
-pyluwen = self.luwen.pyluwen_0_1;
-};
-smi = callPackage ./pkgs/smi {
-pyluwen = self.luwen.pyluwen;
-};
+flash = callPackage ./pkgs/flash { };
+smi = callPackage ./pkgs/smi { };
 umd = callPackage ./pkgs/umd { };
 metal = callPackage ./pkgs/metal { };
-buda-docker = callPackage ./pkgs/buda-docker { };
-# Will be difficult to get these 2 working and keep them working
-buda-prebuilt = callPackage ./pkgs/buda-prebuilt { };
-buda = callPackage ./pkgs/buda { };

 }
 );

@ -56,18 +49,16 @@
 udev-rules = scope.kmd.udev-rules;
 kmd-test = scope.kmd.test;
 sfpi = scope.sfpi.sfpi;
+sfpi-prebuilt = scope.sfpi.prebuilt;
 tt-gcc = scope.sfpi.tt-gcc;
 smi = scope.smi;
-luwen = scope.luwen.luwen;
+luwen = scope.luwen;
-pyluwen = scope.luwen.pyluwen;
+pyluwen = scope.pyluwen;
 tools-common = scope.tools-common;
 system-tools = scope.system-tools;
 flash = scope.flash;
 umd = scope.umd;
 metal = scope.metal;
-buda-docker = scope.buda-docker;
-buda-prebuilt = scope.buda-prebuilt;
-buda = scope.buda;
 default = self.packages.${system}.smi;
 };

@ -101,17 +92,45 @@
 })
 ];
 kernelParams = [
-# TODO: Use the scripts instead https://github.com/tenstorrent/tt-system-tools they work regardless of how many devices there are
-# https://github.com/tenstorrent/tt-system-tools/blob/main/hugepages-setup.sh#L31
-"hugepagesz=1G"
-"hugepages=4"
 "iommu=pt"
 ];
 kernelModules = [ "tenstorrent" ];
 };
-services.udev.packages = [
-(pkgs.tt-udev-rules or self.packages.${pkgs.hostPlatform.system}.udev-rules)
-];
+systemd = {
+# https://github.com/NixOS/nixpkgs/issues/81138
+services.tenstorrent-hugepages.wantedBy = [ "sysinit.target" ];
+# Define https://github.com/tenstorrent/tt-system-tools/blob/29ba4dc6049eef3cee4314c53720417823ffc667/dev-hugepages%5Cx2d1G.mount
+# because it has bad start ordering relations with tenstorrent-hugepages.service
+# or it may be that the `wantedBy` does not work correctly in mounts like it does't work in serices.
+mounts = [
+{
+description = "Mount hugepages at /dev/hugepages-1G for Tenstorrent ASICs";
+what = "hugetlbfs";
+where = "/dev/hugepages-1G";
+type = "hugetlbfs";
+options = "pagesize=1G,mode=0777,nosuid,nodev";
+wantedBy = [ "sysinit.target" ];
+after = [ "tenstorrent-hugepages.service" ];
+unitConfig = {
+DefaultDependencies = false;
+ConditionPathExists = "/sys/kernel/mm/hugepages/hugepages-1048576kB";
+ConditionCapability = "CAP_SYS_ADMIN";
+};
+}
+];
+packages = [
+(pkgs.tt-system-tools or self.packages.${pkgs.hostPlatform.system}.system-tools)
+];
+};
+services.udev = {
+packages = [ (pkgs.tt-udev-rules or self.packages.${pkgs.hostPlatform.system}.udev-rules) ];
+# NOTE: passing just the group does not work currently for docker so unneeded for now so use the udev-rules package for now
+# TT_METAL_HOME=$PWD docker run -v $PWD:/host --workdir /host -v /dev/hugepages-1G:/dev/hugepages-1G -v /dev/tenstorrent:/dev/tenstorrent -u :994 -v /etc/group:/etc/group:ro -it tt-metal bash
+# extraRules = ''
+# KERNEL=="tenstorrent*", MODE="0666", OWNER="root", GROUP="tenstorrent"
+# '';
+};
+# users.groups.tenstorrent = { };

 };
 };
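For orientation, a hypothetical sketch of how a NixOS host might consume a module like the one edited above; the flake input name, URL, and `nixosModules.default` attribute are assumptions, since this compare does not show how the module is exported.

```nix
# Hypothetical consumer flake -- input name, URL and module attribute are assumptions.
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
  inputs.tenstorrent.url = "github:example/this-repo"; # placeholder URL

  outputs = { self, nixpkgs, tenstorrent, ... }: {
    nixosConfigurations.host = nixpkgs.lib.nixosSystem {
      system = "x86_64-linux";
      modules = [
        tenstorrent.nixosModules.default # assumed export name
        ./configuration.nix
      ];
    };
  };
}
```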
@ -1,41 +0,0 @@
{
  dockerTools,
  callPackage,
  git,
}:

let
  budaDocker = dockerTools.pullImage {
    imageName = "ghcr.io/tenstorrent/tt-buda/ubuntu-22-04-amd64/wh_b0";
    imageDigest = "sha256:3a6f84ed355c8738432737f6498745c4bee235b871e97608394e29e396ff6925";
    sha256 = "1vx7v9yx211dibshzgzz9zwm9xgkfj25iabplff19hx687w0n3sz";
    finalImageName = "ghcr.io/tenstorrent/tt-buda/ubuntu-22-04-amd64/wh_b0";
    finalImageTag = "v0.19.3";
  };

  #nixDocker = dockerTools.pullImage {
  #  imageName = "nixpkgs/nix-flakes";
  #  imageDigest = "sha256:cab18b64d25e4bc30415758d6e2f6bc05ecf6ae576092c0cf407b1cebb1ea0e5";
  #  sha256 = "0v4npm2h4z0k3y0h75zsk3q589vhris76g4vg5gkjlfbg16c822j";
  #  finalImageName = "nixpkgs/nix-flakes";
  #  finalImageTag = "latest";
  #};

  nixDocker = callPackage ../../docker/nix/default.nix {
    fromImage = budaDocker;

    # gitMinimal still ships with perl and python
    gitReallyMinimal =
      (git.override {
        perlSupport = false;
        pythonSupport = false;
        withManual = false;
        withpcre2 = false;
      }).overrideAttrs
        (_: {
          # installCheck is broken when perl is disabled
          doInstallCheck = false;
        });
  };
in
budaDocker
@ -1,181 +0,0 @@
|
||||||
{
|
|
||||||
lib,
|
|
||||||
python310Packages,
|
|
||||||
fetchzip,
|
|
||||||
stdenv,
|
|
||||||
callPackage,
|
|
||||||
__splicedPackages,
|
|
||||||
darwin,
|
|
||||||
runCommand,
|
|
||||||
}:
|
|
||||||
|
|
||||||
let
|
|
||||||
python3Packages = python310Packages;
|
|
||||||
pkgs = __splicedPackages;
|
|
||||||
|
|
||||||
boost_1_74 = (callPackage ./vendored/boost/default.nix { }).boost174;
|
|
||||||
yaml-cpp_0_6 = callPackage ./vendored/libyaml-cpp.nix { };
|
|
||||||
|
|
||||||
prebuilt-buda = fetchzip {
|
|
||||||
url = "https://github.com/tenstorrent/tt-buda/releases/download/v0.19.3/pybuda-wh.b0-v0.19.3-ubuntu-22-04-amd64-python3.10.zip";
|
|
||||||
hash = "sha256-M9sgFKSmWra+BglEWgrfFPJRS+UIVKUG+ZF1oTPVexg=";
|
|
||||||
stripRoot = false;
|
|
||||||
};
|
|
||||||
|
|
||||||
pipInstallHook' = python3Packages.callPackage (
|
|
||||||
{ makePythonHook, pip }:
|
|
||||||
makePythonHook {
|
|
||||||
name = "pip-install-hook";
|
|
||||||
propagatedBuildInputs = [ pip ];
|
|
||||||
substitutions = {
|
|
||||||
pythonInterpreter = python3Packages.python.interpreter;
|
|
||||||
pythonSitePackages = python3Packages.python.sitePackages;
|
|
||||||
};
|
|
||||||
} ./vendored/pip-install-hook.sh
|
|
||||||
) { };
|
|
||||||
|
|
||||||
nukeReferences = callPackage ./vendored/nuke-references.nix {
|
|
||||||
inherit (darwin) signingUtils;
|
|
||||||
};
|
|
||||||
|
|
||||||
autoPatchelfHook = callPackage (
|
|
||||||
{ makeSetupHook, bintools }:
|
|
||||||
makeSetupHook {
|
|
||||||
name = "auto-patchelf-hook";
|
|
||||||
propagatedBuildInputs = [
|
|
||||||
bintools
|
|
||||||
];
|
|
||||||
substitutions = {
|
|
||||||
pythonInterpreter = "${python3Packages.python.withPackages (ps: [ ps.pyelftools ])}/bin/python";
|
|
||||||
autoPatchelfScript = ./vendored/auto-patchelf.py;
|
|
||||||
};
|
|
||||||
} ./auto-patchelf.sh
|
|
||||||
) { };
|
|
||||||
|
|
||||||
tt-buda = stdenv.mkDerivation rec {
|
|
||||||
pname = "tt-buda";
|
|
||||||
version = "0.19.3";
|
|
||||||
format = "wheel";
|
|
||||||
|
|
||||||
src = prebuilt-buda;
|
|
||||||
|
|
||||||
nativeBuildInputs = [
|
|
||||||
pipInstallHook'
|
|
||||||
nukeReferences
|
|
||||||
];
|
|
||||||
|
|
||||||
preInstall = ''
|
|
||||||
mkdir dist
|
|
||||||
mv *.whl dist/
|
|
||||||
'';
|
|
||||||
|
|
||||||
postInstall = ''
|
|
||||||
find $out -name "__pycache__" -type d | xargs rm -rf
|
|
||||||
|
|
||||||
find $out/bin/ -type f -not -name 'debuda' -print0 | xargs -0 rm --
|
|
||||||
substituteInPlace $out/bin/debuda \
|
|
||||||
--replace-fail "${python3Packages.python.interpreter}" "/usr/bin/env python3"
|
|
||||||
|
|
||||||
# error: illegal path references in fixed-output derivation
|
|
||||||
find $out -print0 | xargs -0 nuke-refs
|
|
||||||
|
|
||||||
'';
|
|
||||||
|
|
||||||
dontPatchShebangs = true;
|
|
||||||
dontFixup = true;
|
|
||||||
|
|
||||||
outputHash = "sha256-eSU10kgIQzJ0kv6gmQwMCdVw0uBpohVyYqkjK4RU2ng=";
|
|
||||||
outputHashAlgo = "sha256";
|
|
||||||
outputHashMode = "recursive";
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
description = "Tenstorrent TT-BUDA Repository";
|
|
||||||
homepage = "https://github.com/tenstorrent/tt-buda";
|
|
||||||
license = lib.licenses.asl20;
|
|
||||||
mainProgram = "tt-buda";
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
tt-buda-final = python3Packages.toPythonModule (
|
|
||||||
stdenv.mkDerivation (finalAttrs: {
|
|
||||||
pname = "tt-buda-final";
|
|
||||||
inherit (tt-buda) version;
|
|
||||||
|
|
||||||
nativeBuildInputs = [
|
|
||||||
autoPatchelfHook
|
|
||||||
python3Packages.pythonImportsCheckHook
|
|
||||||
];
|
|
||||||
|
|
||||||
buildInputs =
|
|
||||||
with pkgs;
|
|
||||||
[
|
|
||||||
stdenv.cc.libc.libgcc
|
|
||||||
stdenv.cc.libc.libgcc.lib
|
|
||||||
libpng
|
|
||||||
python3Packages.python
|
|
||||||
ncurses
|
|
||||||
expat
|
|
||||||
hwloc
|
|
||||||
zeromq
|
|
||||||
libjpeg8
|
|
||||||
glib
|
|
||||||
libGL
|
|
||||||
boost_1_74
|
|
||||||
yaml-cpp_0_6
|
|
||||||
]
|
|
||||||
++ (with pkgs.xorg; [
|
|
||||||
libxcb
|
|
||||||
libXext
|
|
||||||
libX11
|
|
||||||
libSM
|
|
||||||
libICE
|
|
||||||
]);
|
|
||||||
|
|
||||||
#runtimeDependencies = [
|
|
||||||
# # from torch._C import * # noqa: F403
|
|
||||||
# # ImportError: libstdc++.so.6: cannot open shared object file: No such file or directory
|
|
||||||
# stdenv.cc.libc.libgcc.lib
|
|
||||||
|
|
||||||
#];
|
|
||||||
|
|
||||||
#pythonImportsCheck = [
|
|
||||||
# "pybuda"
|
|
||||||
# "torch"
|
|
||||||
#];
|
|
||||||
|
|
||||||
passthru = {
|
|
||||||
inherit tt-buda yaml-cpp_0_6 boost_1_74;
|
|
||||||
pythonWith = python3Packages.python.withPackages (ps: [ finalAttrs.finalPackage ]);
|
|
||||||
|
|
||||||
tests = {
|
|
||||||
integrationTest =
|
|
||||||
runCommand "tt-buda-tests-integration-test"
|
|
||||||
{
|
|
||||||
strictDeps = true;
|
|
||||||
nativeBuildInputs = [
|
|
||||||
finalAttrs.passthru.pythonWith
|
|
||||||
stdenv.cc.libc.libgcc.lib
|
|
||||||
];
|
|
||||||
LD_LIBRARY_PATH = lib.makeLibraryPath [ stdenv.cc.libc.libgcc.lib ];
|
|
||||||
}
|
|
||||||
''
|
|
||||||
export HOME=$(mktemp -d)
|
|
||||||
python3 "${./test.py}"
|
|
||||||
touch "$out"
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
dontUnpack = true;
|
|
||||||
installPhase = ''
|
|
||||||
runHook preInstall
|
|
||||||
mkdir -p $out
|
|
||||||
cp -r ${tt-buda}/* $out
|
|
||||||
runHook postInstall
|
|
||||||
|
|
||||||
|
|
||||||
'';
|
|
||||||
})
|
|
||||||
);
|
|
||||||
in
|
|
||||||
tt-buda-final
|
|
|
@ -1,26 +0,0 @@
import pybuda
import torch


# Sample PyTorch module
class PyTorchTestModule(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.weights1 = torch.nn.Parameter(torch.rand(32, 32), requires_grad=True)
        self.weights2 = torch.nn.Parameter(torch.rand(32, 32), requires_grad=True)
    def forward(self, act1, act2):
        m1 = torch.matmul(act1, self.weights1)
        m2 = torch.matmul(act2, self.weights2)
        return m1 + m2, m1


def test_module_direct_pytorch():
    input1 = torch.rand(4, 32, 32)
    input2 = torch.rand(4, 32, 32)
    # Run single inference pass on a PyTorch module, using a wrapper to convert to PyBUDA first
    output = pybuda.PyTorchModule("direct_pt", PyTorchTestModule()).run(input1, input2)
    print(output)


if __name__ == "__main__":
    test_module_direct_pytorch()
@ -1,439 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import os
|
|
||||||
import pprint
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
from fnmatch import fnmatch
|
|
||||||
from collections import defaultdict
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from itertools import chain
|
|
||||||
from pathlib import Path, PurePath
|
|
||||||
from typing import DefaultDict, Generator, Iterator, Optional
|
|
||||||
|
|
||||||
from elftools.common.exceptions import ELFError # type: ignore
|
|
||||||
from elftools.elf.dynamic import DynamicSection # type: ignore
|
|
||||||
from elftools.elf.sections import NoteSection # type: ignore
|
|
||||||
from elftools.elf.elffile import ELFFile # type: ignore
|
|
||||||
from elftools.elf.enums import ENUM_E_TYPE, ENUM_EI_OSABI # type: ignore
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def open_elf(path: Path) -> Iterator[ELFFile]:
|
|
||||||
with path.open('rb') as stream:
|
|
||||||
yield ELFFile(stream)
|
|
||||||
|
|
||||||
|
|
||||||
def is_static_executable(elf: ELFFile) -> bool:
|
|
||||||
# Statically linked executables have an ELF type of EXEC but no INTERP.
|
|
||||||
return (elf.header["e_type"] == 'ET_EXEC'
|
|
||||||
and not elf.get_section_by_name(".interp"))
|
|
||||||
|
|
||||||
|
|
||||||
def is_dynamic_executable(elf: ELFFile) -> bool:
|
|
||||||
# We do not require an ELF type of EXEC. This also catches
|
|
||||||
# position-independent executables, as they typically have an INTERP
|
|
||||||
# section but their ELF type is DYN.
|
|
||||||
return bool(elf.get_section_by_name(".interp"))
|
|
||||||
|
|
||||||
|
|
||||||
def get_dependencies(elf: ELFFile) -> list[list[Path]]:
|
|
||||||
dependencies = []
|
|
||||||
# This convoluted code is here on purpose. For some reason, using
|
|
||||||
# elf.get_section_by_name(".dynamic") does not always return an
|
|
||||||
# instance of DynamicSection, but that is required to call iter_tags
|
|
||||||
for section in elf.iter_sections():
|
|
||||||
if isinstance(section, DynamicSection):
|
|
||||||
for tag in section.iter_tags('DT_NEEDED'):
|
|
||||||
dependencies.append([Path(tag.needed)])
|
|
||||||
break # There is only one dynamic section
|
|
||||||
|
|
||||||
return dependencies
|
|
||||||
|
|
||||||
|
|
||||||
def get_dlopen_dependencies(elf: ELFFile) -> list[list[Path]]:
|
|
||||||
"""
|
|
||||||
Extracts dependencies from the `.note.dlopen` section.
|
|
||||||
This is a FreeDesktop standard to annotate binaries with libraries that it may `dlopen`.
|
|
||||||
See https://systemd.io/ELF_DLOPEN_METADATA/
|
|
||||||
"""
|
|
||||||
dependencies = []
|
|
||||||
for section in elf.iter_sections():
|
|
||||||
if not isinstance(section, NoteSection) or section.name != ".note.dlopen":
|
|
||||||
continue
|
|
||||||
for note in section.iter_notes():
|
|
||||||
if note["n_type"] != 0x407C0C0A or note["n_name"] != "FDO":
|
|
||||||
continue
|
|
||||||
note_desc = note["n_desc"]
|
|
||||||
text = note_desc.decode("utf-8").rstrip("\0")
|
|
||||||
j = json.loads(text)
|
|
||||||
for d in j:
|
|
||||||
dependencies.append([Path(soname) for soname in d["soname"]])
|
|
||||||
return dependencies
|
|
||||||
|
|
||||||
|
|
||||||
def get_rpath(elf: ELFFile) -> list[str]:
|
|
||||||
# This convoluted code is here on purpose. For some reason, using
|
|
||||||
# elf.get_section_by_name(".dynamic") does not always return an
|
|
||||||
# instance of DynamicSection, but that is required to call iter_tags
|
|
||||||
for section in elf.iter_sections():
|
|
||||||
if isinstance(section, DynamicSection):
|
|
||||||
for tag in section.iter_tags('DT_RUNPATH'):
|
|
||||||
return tag.runpath.split(':')
|
|
||||||
|
|
||||||
for tag in section.iter_tags('DT_RPATH'):
|
|
||||||
return tag.rpath.split(':')
|
|
||||||
|
|
||||||
break # There is only one dynamic section
|
|
||||||
|
|
||||||
return []
|
|
||||||
|
|
||||||
|
|
||||||
def get_arch(elf: ELFFile) -> str:
|
|
||||||
return elf.get_machine_arch()
|
|
||||||
|
|
||||||
|
|
||||||
def get_osabi(elf: ELFFile) -> str:
|
|
||||||
return elf.header["e_ident"]["EI_OSABI"]
|
|
||||||
|
|
||||||
|
|
||||||
def osabi_are_compatible(wanted: str, got: str) -> bool:
|
|
||||||
"""
|
|
||||||
Tests whether two OS ABIs are compatible, taking into account the
|
|
||||||
generally accepted compatibility of SVR4 ABI with other ABIs.
|
|
||||||
"""
|
|
||||||
if not wanted or not got:
|
|
||||||
# One of the types couldn't be detected, so as a fallback we'll
|
|
||||||
# assume they're compatible.
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Generally speaking, the base ABI (0x00), which is represented by
|
|
||||||
# readelf(1) as "UNIX - System V", indicates broad compatibility
|
|
||||||
# with other ABIs.
|
|
||||||
#
|
|
||||||
# TODO: This isn't always true. For example, some OSes embed ABI
|
|
||||||
# compatibility into SHT_NOTE sections like .note.tag and
|
|
||||||
# .note.ABI-tag. It would be prudent to add these to the detection
|
|
||||||
# logic to produce better ABI information.
|
|
||||||
if wanted == 'ELFOSABI_SYSV':
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Similarly here, we should be able to link against a superset of
|
|
||||||
# features, so even if the target has another ABI, this should be
|
|
||||||
# fine.
|
|
||||||
if got == 'ELFOSABI_SYSV':
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Otherwise, we simply return whether the ABIs are identical.
|
|
||||||
return wanted == got
|
|
||||||
|
|
||||||
|
|
||||||
def glob(path: Path, pattern: str, recursive: bool) -> Iterator[Path]:
|
|
||||||
if path.is_dir():
|
|
||||||
return path.rglob(pattern) if recursive else path.glob(pattern)
|
|
||||||
else:
|
|
||||||
# path.glob won't return anything if the path is not a directory.
|
|
||||||
# We extend that behavior by matching the file name against the pattern.
|
|
||||||
# This allows to pass single files instead of dirs to auto_patchelf,
|
|
||||||
# for greater control on the files to consider.
|
|
||||||
return [path] if path.match(pattern) else []
|
|
||||||
|
|
||||||
|
|
||||||
cached_paths: set[Path] = set()
|
|
||||||
soname_cache: DefaultDict[tuple[str, str], list[tuple[Path, str]]] = defaultdict(list)
|
|
||||||
|
|
||||||
|
|
||||||
def populate_cache(initial: list[Path], recursive: bool =False) -> None:
|
|
||||||
lib_dirs = list(initial)
|
|
||||||
|
|
||||||
while lib_dirs:
|
|
||||||
lib_dir = lib_dirs.pop(0)
|
|
||||||
|
|
||||||
if lib_dir in cached_paths:
|
|
||||||
continue
|
|
||||||
|
|
||||||
cached_paths.add(lib_dir)
|
|
||||||
|
|
||||||
for path in glob(lib_dir, "*.so*", recursive):
|
|
||||||
if not path.is_file():
|
|
||||||
continue
|
|
||||||
|
|
||||||
# As an optimisation, resolve the symlinks here, as the target is unique
|
|
||||||
# XXX: (layus, 2022-07-25) is this really an optimisation in all cases ?
|
|
||||||
# It could make the rpath bigger or break the fragile precedence of $out.
|
|
||||||
resolved = path.resolve()
|
|
||||||
# Do not use resolved paths when names do not match
|
|
||||||
if resolved.name != path.name:
|
|
||||||
resolved = path
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open_elf(path) as elf:
|
|
||||||
osabi = get_osabi(elf)
|
|
||||||
arch = get_arch(elf)
|
|
||||||
rpath = [Path(p) for p in get_rpath(elf)
|
|
||||||
if p and '$ORIGIN' not in p]
|
|
||||||
lib_dirs += rpath
|
|
||||||
soname_cache[(path.name, arch)].append((resolved.parent, osabi))
|
|
||||||
|
|
||||||
except ELFError:
|
|
||||||
# Not an ELF file in the right format
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def find_dependency(soname: str, soarch: str, soabi: str) -> Optional[Path]:
|
|
||||||
for lib, libabi in soname_cache[(soname, soarch)]:
|
|
||||||
if osabi_are_compatible(soabi, libabi):
|
|
||||||
return lib
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class Dependency:
|
|
||||||
file: Path # The file that contains the dependency
|
|
||||||
name: Path # The name of the dependency
|
|
||||||
found: bool = False # Whether it was found somewhere
|
|
||||||
|
|
||||||
|
|
||||||
def auto_patchelf_file(path: Path, runtime_deps: list[Path], append_rpaths: list[Path] = [], keep_libc: bool = False, extra_args: list[str] = []) -> list[Dependency]:
|
|
||||||
try:
|
|
||||||
with open_elf(path) as elf:
|
|
||||||
|
|
||||||
if is_static_executable(elf):
|
|
||||||
# No point patching these
|
|
||||||
print(f"skipping {path} because it is statically linked")
|
|
||||||
return []
|
|
||||||
|
|
||||||
if elf.num_segments() == 0:
|
|
||||||
# no segment (e.g. object file)
|
|
||||||
print(f"skipping {path} because it contains no segment")
|
|
||||||
return []
|
|
||||||
|
|
||||||
file_arch = get_arch(elf)
|
|
||||||
if interpreter_arch != file_arch:
|
|
||||||
# Our target architecture is different than this file's
|
|
||||||
# architecture, so skip it.
|
|
||||||
print(f"skipping {path} because its architecture ({file_arch})"
|
|
||||||
f" differs from target ({interpreter_arch})")
|
|
||||||
return []
|
|
||||||
|
|
||||||
file_osabi = get_osabi(elf)
|
|
||||||
if not osabi_are_compatible(interpreter_osabi, file_osabi):
|
|
||||||
print(f"skipping {path} because its OS ABI ({file_osabi}) is"
|
|
||||||
f" not compatible with target ({interpreter_osabi})")
|
|
||||||
return []
|
|
||||||
|
|
||||||
file_is_dynamic_executable = is_dynamic_executable(elf)
|
|
||||||
|
|
||||||
file_dependencies = get_dependencies(elf) + get_dlopen_dependencies(elf)
|
|
||||||
|
|
||||||
except ELFError:
|
|
||||||
return []
|
|
||||||
|
|
||||||
rpath = []
|
|
||||||
if file_is_dynamic_executable:
|
|
||||||
print("setting interpreter of", path)
|
|
||||||
subprocess.run(
|
|
||||||
["patchelf", "--set-interpreter", interpreter_path.as_posix(), path.as_posix()] + extra_args,
|
|
||||||
check=True)
|
|
||||||
rpath += runtime_deps
|
|
||||||
|
|
||||||
print("searching for dependencies of", path)
|
|
||||||
dependencies = []
|
|
||||||
# Be sure to get the output of all missing dependencies instead of
|
|
||||||
# failing at the first one, because it's more useful when working
|
|
||||||
# on a new package where you don't yet know the dependencies.
|
|
||||||
for dep in file_dependencies:
|
|
||||||
was_found = False
|
|
||||||
for candidate in dep:
|
|
||||||
|
|
||||||
# This loop determines which candidate for a given
|
|
||||||
# dependency can be found, and how. There may be multiple
|
|
||||||
# candidates for a dep because of '.note.dlopen'
|
|
||||||
# dependencies.
|
|
||||||
#
|
|
||||||
# 1. If a candidate is an absolute path, it is already a
|
|
||||||
# valid dependency if that path exists, and nothing needs
|
|
||||||
# to be done. It should be an error if that path does not exist.
|
|
||||||
# 2. If a candidate is found within libc, it should be dropped
|
|
||||||
# and resolved automatically by the dynamic linker, unless
|
|
||||||
# keep_libc is enabled.
|
|
||||||
# 3. If a candidate is found in our library dependencies, that
|
|
||||||
# dependency should be added to rpath.
|
|
||||||
# 4. If all of the above fail, libc dependencies should still be
|
|
||||||
# considered found. This is in contrast to step 2, because
|
|
||||||
# enabling keep_libc should allow libc to be found in step 3
|
|
||||||
# if possible to preserve its presence in rpath.
|
|
||||||
#
|
|
||||||
# These conditions are checked in this order, because #2
|
|
||||||
# and #3 may both be true. In that case, we still want to
|
|
||||||
# add the dependency to rpath, as the original binary
|
|
||||||
# presumably had it and this should be preserved.
|
|
||||||
|
|
||||||
is_libc = (libc_lib / candidate).is_file()
|
|
||||||
|
|
||||||
if candidate.is_absolute() and candidate.is_file():
|
|
||||||
was_found = True
|
|
||||||
break
|
|
||||||
elif is_libc and not keep_libc:
|
|
||||||
was_found = True
|
|
||||||
break
|
|
||||||
elif found_dependency := find_dependency(candidate.name, file_arch, file_osabi):
|
|
||||||
rpath.append(found_dependency)
|
|
||||||
dependencies.append(Dependency(path, candidate, found=True))
|
|
||||||
print(f" {candidate} -> found: {found_dependency}")
|
|
||||||
was_found = True
|
|
||||||
break
|
|
||||||
elif is_libc and keep_libc:
|
|
||||||
was_found = True
|
|
||||||
break
|
|
||||||
|
|
||||||
if not was_found:
|
|
||||||
dep_name = dep[0] if len(dep) == 1 else f"any({', '.join(map(str, dep))})"
|
|
||||||
dependencies.append(Dependency(path, dep_name, found=False))
|
|
||||||
print(f" {dep_name} -> not found!")
|
|
||||||
|
|
||||||
rpath.extend(append_rpaths)
|
|
||||||
|
|
||||||
# Dedup the rpath
|
|
||||||
rpath_str = ":".join(dict.fromkeys(map(Path.as_posix, rpath)))
|
|
||||||
|
|
||||||
if rpath:
|
|
||||||
print("setting RPATH to:", rpath_str)
|
|
||||||
subprocess.run(
|
|
||||||
["patchelf", "--set-rpath", rpath_str, path.as_posix()] + extra_args,
|
|
||||||
check=True)
|
|
||||||
|
|
||||||
return dependencies
|
|
||||||
|
|
||||||
|
|
||||||
def auto_patchelf(
|
|
||||||
paths_to_patch: list[Path],
|
|
||||||
lib_dirs: list[Path],
|
|
||||||
runtime_deps: list[Path],
|
|
||||||
recursive: bool = True,
|
|
||||||
ignore_missing: list[str] = [],
|
|
||||||
append_rpaths: list[Path] = [],
|
|
||||||
keep_libc: bool = False,
|
|
||||||
extra_args: list[str] = []) -> None:
|
|
||||||
|
|
||||||
if not paths_to_patch:
|
|
||||||
sys.exit("No paths to patch, stopping.")
|
|
||||||
|
|
||||||
# Add all shared objects of the current output path to the cache,
|
|
||||||
# before lib_dirs, so that they are chosen first in find_dependency.
|
|
||||||
populate_cache(paths_to_patch, recursive)
|
|
||||||
populate_cache(lib_dirs)
|
|
||||||
|
|
||||||
dependencies = []
|
|
||||||
for path in chain.from_iterable(glob(p, '*', recursive) for p in paths_to_patch):
|
|
||||||
if not path.is_symlink() and path.is_file():
|
|
||||||
dependencies += auto_patchelf_file(path, runtime_deps, append_rpaths, keep_libc, extra_args)
|
|
||||||
|
|
||||||
missing = [dep for dep in dependencies if not dep.found]
|
|
||||||
|
|
||||||
# Print a summary of the missing dependencies at the end
|
|
||||||
print(f"auto-patchelf: {len(missing)} dependencies could not be satisfied")
|
|
||||||
failure = False
|
|
||||||
for dep in missing:
|
|
||||||
for pattern in ignore_missing:
|
|
||||||
if fnmatch(dep.name.name, pattern):
|
|
||||||
print(f"warn: auto-patchelf ignoring missing {dep.name} wanted by {dep.file}")
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
print(f"error: auto-patchelf could not satisfy dependency {dep.name} wanted by {dep.file}")
|
|
||||||
failure = True
|
|
||||||
|
|
||||||
if failure:
|
|
||||||
sys.exit('auto-patchelf failed to find all the required dependencies.\n'
|
|
||||||
'Add the missing dependencies to --libs or use '
|
|
||||||
'`--ignore-missing="foo.so.1 bar.so etc.so"`.')
|
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
prog="auto-patchelf",
|
|
||||||
description='auto-patchelf tries as hard as possible to patch the'
|
|
||||||
' provided binary files by looking for compatible'
|
|
||||||
'libraries in the provided paths.')
|
|
||||||
parser.add_argument(
|
|
||||||
"--ignore-missing",
|
|
||||||
nargs="*",
|
|
||||||
type=str,
|
|
||||||
help="Do not fail when some dependencies are not found.")
|
|
||||||
parser.add_argument(
|
|
||||||
"--no-recurse",
|
|
||||||
dest="recursive",
|
|
||||||
action="store_false",
|
|
||||||
help="Disable the recursive traversal of paths to patch.")
|
|
||||||
parser.add_argument(
|
|
||||||
"--paths", nargs="*", type=Path,
|
|
||||||
help="Paths whose content needs to be patched."
|
|
||||||
" Single files and directories are accepted."
|
|
||||||
" Directories are traversed recursively by default.")
|
|
||||||
parser.add_argument(
|
|
||||||
"--libs", nargs="*", type=Path,
|
|
||||||
help="Paths where libraries are searched for."
|
|
||||||
" Single files and directories are accepted."
|
|
||||||
" Directories are not searched recursively.")
|
|
||||||
parser.add_argument(
|
|
||||||
"--runtime-dependencies", nargs="*", type=Path,
|
|
||||||
help="Paths to prepend to the runtime path of executable binaries."
|
|
||||||
" Subject to deduplication, which may imply some reordering.")
|
|
||||||
parser.add_argument(
|
|
||||||
"--append-rpaths",
|
|
||||||
nargs="*",
|
|
||||||
type=Path,
|
|
||||||
help="Paths to append to all runtime paths unconditionally",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--keep-libc",
|
|
||||||
dest="keep_libc",
|
|
||||||
action="store_true",
|
|
||||||
help="Attempt to search for and relink libc dependencies.",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--extra-args",
|
|
||||||
# Undocumented Python argparse feature: consume all remaining arguments
|
|
||||||
# as values for this one. This means this argument should always be passed
|
|
||||||
# last.
|
|
||||||
nargs="...",
|
|
||||||
type=str,
|
|
||||||
help="Extra arguments to pass to patchelf. This argument should always come last."
|
|
||||||
)
|
|
||||||
|
|
||||||
print("automatically fixing dependencies for ELF files")
|
|
||||||
args = parser.parse_args()
|
|
||||||
pprint.pprint(vars(args))
|
|
||||||
|
|
||||||
auto_patchelf(
|
|
||||||
args.paths,
|
|
||||||
args.libs,
|
|
||||||
args.runtime_dependencies,
|
|
||||||
args.recursive,
|
|
||||||
args.ignore_missing,
|
|
||||||
append_rpaths=args.append_rpaths,
|
|
||||||
keep_libc=args.keep_libc,
|
|
||||||
extra_args=args.extra_args)
|
|
||||||
|
|
||||||
|
|
||||||
interpreter_path: Path = None # type: ignore
|
|
||||||
interpreter_osabi: str = None # type: ignore
|
|
||||||
interpreter_arch: str = None # type: ignore
|
|
||||||
libc_lib: Path = None # type: ignore
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
nix_support = Path(os.environ['NIX_BINTOOLS']) / 'nix-support'
|
|
||||||
interpreter_path = Path((nix_support / 'dynamic-linker').read_text().strip())
|
|
||||||
libc_lib = Path((nix_support / 'orig-libc').read_text().strip()) / 'lib'
|
|
||||||
|
|
||||||
with open_elf(interpreter_path) as interpreter:
|
|
||||||
interpreter_osabi = get_osabi(interpreter)
|
|
||||||
interpreter_arch = get_arch(interpreter)
|
|
||||||
|
|
||||||
if interpreter_arch and interpreter_osabi and interpreter_path and libc_lib:
|
|
||||||
main()
|
|
||||||
else:
|
|
||||||
sys.exit("Failed to parse dynamic linker (ld) properties.")
|
|
|
@ -1,20 +0,0 @@
# This setup hook calls patchelf to automatically remove unneeded
# directories from the RPATH of every library or executable in every
# output.

fixupOutputHooks+=('if [ -z "${dontPatchELF-}" ]; then patchELF "$prefix"; fi')

patchELF() {
    local dir="$1"
    [ -e "$dir" ] || return 0

    echo "shrinking RPATHs of ELF executables and libraries in $dir"

    local i
    while IFS= read -r -d $'\0' i; do
        if [[ "$i" =~ .build-id ]]; then continue; fi
        if ! isELF "$i"; then continue; fi
        echo "shrinking $i"
        patchelf --shrink-rpath "$i" || true
    done < <(find "$dir" -type f -print0)
}
@ -1,15 +0,0 @@
{ callPackage, fetchurl, fetchpatch, ... } @ args:

callPackage ./generic.nix (args // rec {
  version = "1.74.0";

  src = fetchurl {
    urls = [
      "mirror://sourceforge/boost/boost_${builtins.replaceStrings ["."] ["_"] version}.tar.bz2"
      "https://boostorg.jfrog.io/artifactory/main/release/${version}/source/boost_${builtins.replaceStrings ["."] ["_"] version}.tar.bz2"
    ];
    # SHA256 from http://www.boost.org/users/history/version_1_74_0.html
    sha256 = "83bfc1507731a0906e387fc28b7ef5417d591429e51e788417fe9ff025e116b1";
  };
})
@ -1,22 +0,0 @@
diff --git a/tools/boost_install/boost-install.jam b/tools/boost_install/boost-install.jam
index 4238f921e..8fc1ea269 100644
--- a/tools/boost_install/boost-install.jam
+++ b/tools/boost_install/boost-install.jam
@@ -649,7 +649,7 @@ rule generate-cmake-config- ( target : sources * : properties * )
""
"# Compute the include and library directories relative to this file."
""
- "get_filename_component(_BOOST_CMAKEDIR \"${CMAKE_CURRENT_LIST_DIR}/../\" REALPATH)"
+ "get_filename_component(_BOOST_REAL_CMAKEDIR \"${CMAKE_CURRENT_LIST_DIR}/../\" REALPATH)"
: true ;

if [ path.is-rooted $(cmakedir) ]
@@ -668,6 +668,8 @@ rule generate-cmake-config- ( target : sources * : properties * )
" unset(_BOOST_CMAKEDIR_ORIGINAL)"
"endif()"
""
+ "# Assume that the installer actually did know where the libs were to be installed"
+ "get_filename_component(_BOOST_CMAKEDIR \"$(cmakedir-native)\" REALPATH)"
: true ;
}
@ -1,21 +0,0 @@
diff --git a/tools/boost_install/boost-install.jam b/tools/boost_install/boost-install.jam
index ad19f7b55..ec6bf57ff 100644
--- a/tools/boost_install/boost-install.jam
+++ b/tools/boost_install/boost-install.jam
@@ -587,6 +587,7 @@ rule generate-cmake-config- ( target : sources * : properties * )
"# Compute the include and library directories relative to this file."
""
"get_filename_component(_BOOST_CMAKEDIR \"${CMAKE_CURRENT_LIST_DIR}/../\" ABSOLUTE)"
+ "get_filename_component(_BOOST_REAL_CMAKEDIR \"${CMAKE_CURRENT_LIST_DIR}/../\" ABSOLUTE)"
: true ;

if [ path.is-rooted $(cmakedir) ]
@@ -607,6 +608,8 @@ rule generate-cmake-config- ( target : sources * : properties * )
" unset(_BOOST_CMAKEDIR_ORIGINAL)"
"endif()"
""
+ "# Assume that the installer actually did know where the libs were to be installed"
+ "get_filename_component(_BOOST_CMAKEDIR \"$(cmakedir-native)\" REALPATH)"
: true ;
}
@ -1,45 +0,0 @@
diff --git a/tools/build/src/tools/python.jam b/tools/build/src/tools/python.jam
index 273b28a..2d2031e 100644
--- a/tools/build/src/tools/python.jam
+++ b/tools/build/src/tools/python.jam
@@ -428,13 +428,7 @@ local rule windows-installed-pythons ( version ? )

local rule darwin-installed-pythons ( version ? )
{
- version ?= $(.version-countdown) ;
-
- local prefix
- = [ GLOB /System/Library/Frameworks /Library/Frameworks
- : Python.framework ] ;
-
- return $(prefix)/Versions/$(version)/bin/python ;
+ return ;
}


@@ -890,25 +884,6 @@ local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? :

# See if we can find a framework directory on darwin.
local framework-directory ;
- if $(target-os) = darwin
- {
- # Search upward for the framework directory.
- local framework-directory = $(libraries[-1]) ;
- while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework
- {
- framework-directory = $(framework-directory:D) ;
- }
-
- if $(framework-directory:D=) = Python.framework
- {
- debug-message framework directory is \"$(framework-directory)\" ;
- }
- else
- {
- debug-message "no framework directory found; using library path" ;
- framework-directory = ;
- }
- }

local dll-path = $(libraries) ;
@ -1,20 +0,0 @@
{ lib
, callPackage
, boost-build
, fetchurl
}:

let
  makeBoost = file:
    lib.fix (self:
      callPackage file {
        boost-build = boost-build.override {
          # useBoost allows us passing in src and version from
          # the derivation we are building to get a matching b2 version.
          useBoost = self;
        };
      }
    );
in {
  boost174 = makeBoost ./1.74.nix;
}
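The `lib.fix (self: ...)` pattern above feeds the finished derivation back into its own arguments, which is what lets `useBoost = self` hand generic.nix a boost-build matching its own version. A small self-contained illustration of that fixed point (names and values here are examples only):

```nix
# Standalone illustration of the fixed-point pattern used by makeBoost above.
let
  fix = f: let x = f x; in x; # same shape as lib.fix
  boostLike = fix (self: {
    version = "1.74.0";
    # `self` refers to the final attribute set even while it is being defined
    b2 = "b2-for-boost-${self.version}";
  });
in
boostLike.b2 # evaluates to "b2-for-boost-1.74.0"
```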
@ -1,249 +0,0 @@
|
||||||
{ lib, stdenv, icu, expat, zlib, bzip2, zstd, xz, python ? null, fixDarwinDylibNames, libiconv, libxcrypt
|
|
||||||
, boost-build
|
|
||||||
, fetchpatch
|
|
||||||
, which
|
|
||||||
, toolset ? /**/ if stdenv.cc.isClang then "clang"
|
|
||||||
else if stdenv.cc.isGNU then "gcc"
|
|
||||||
else null
|
|
||||||
, enableRelease ? true
|
|
||||||
, enableDebug ? false
|
|
||||||
, enableSingleThreaded ? false
|
|
||||||
, enableMultiThreaded ? true
|
|
||||||
, enableShared ? !(with stdenv.hostPlatform; isStatic || libc == "msvcrt") # problems for now
|
|
||||||
, enableStatic ? !enableShared
|
|
||||||
, enablePython ? false
|
|
||||||
, enableNumpy ? false
|
|
||||||
, enableIcu ? stdenv.hostPlatform == stdenv.buildPlatform
|
|
||||||
, taggedLayout ? ((enableRelease && enableDebug) || (enableSingleThreaded && enableMultiThreaded) || (enableShared && enableStatic))
|
|
||||||
, patches ? []
|
|
||||||
, boostBuildPatches ? []
|
|
||||||
, useMpi ? false
|
|
||||||
, mpi
|
|
||||||
, extraB2Args ? []
|
|
||||||
|
|
||||||
# Attributes inherit from specific versions
|
|
||||||
, version, src
|
|
||||||
, ...
|
|
||||||
}:
|
|
||||||
|
|
||||||
# We must build at least one type of libraries
|
|
||||||
assert enableShared || enableStatic;
|
|
||||||
|
|
||||||
assert enableNumpy -> enablePython;
|
|
||||||
|
|
||||||
# Boost <1.69 can't be built on linux with clang >8, because pth was removed
|
|
||||||
assert with lib; (stdenv.isLinux && toolset == "clang" && versionAtLeast stdenv.cc.version "8.0.0") -> versionAtLeast version "1.69";
|
|
||||||
|
|
||||||
let
|
|
||||||
|
|
||||||
variant = lib.concatStringsSep ","
|
|
||||||
(lib.optional enableRelease "release" ++
|
|
||||||
lib.optional enableDebug "debug");
|
|
||||||
|
|
||||||
threading = lib.concatStringsSep ","
|
|
||||||
(lib.optional enableSingleThreaded "single" ++
|
|
||||||
lib.optional enableMultiThreaded "multi");
|
|
||||||
|
|
||||||
link = lib.concatStringsSep ","
|
|
||||||
(lib.optional enableShared "shared" ++
|
|
||||||
lib.optional enableStatic "static");
|
|
||||||
|
|
||||||
runtime-link = if enableShared then "shared" else "static";
|
|
||||||
|
|
||||||
# To avoid library name collisions
|
|
||||||
layout = if taggedLayout then "tagged" else "system";
|
|
||||||
|
|
||||||
needUserConfig = stdenv.hostPlatform != stdenv.buildPlatform || useMpi || (stdenv.isDarwin && enableShared);
|
|
||||||
|
|
||||||
b2Args = lib.concatStringsSep " " ([
|
|
||||||
"--includedir=$dev/include"
|
|
||||||
"--libdir=$out/lib"
|
|
||||||
"-j$NIX_BUILD_CORES"
|
|
||||||
"--layout=${layout}"
|
|
||||||
"variant=${variant}"
|
|
||||||
"threading=${threading}"
|
|
||||||
"link=${link}"
|
|
||||||
"-sEXPAT_INCLUDE=${expat.dev}/include"
|
|
||||||
"-sEXPAT_LIBPATH=${expat.out}/lib"
|
|
||||||
|
|
||||||
# TODO: make this unconditional
|
|
||||||
] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform ||
|
|
||||||
# required on mips; see 61d9f201baeef4c4bb91ad8a8f5f89b747e0dfe4
|
|
||||||
(stdenv.hostPlatform.isMips && lib.versionAtLeast version "1.79")) [
|
|
||||||
"address-model=${toString stdenv.hostPlatform.parsed.cpu.bits}"
|
|
||||||
"architecture=${if stdenv.hostPlatform.isMips64
|
|
||||||
then if lib.versionOlder version "1.78" then "mips1" else "mips"
|
|
||||||
else if stdenv.hostPlatform.parsed.cpu.name == "s390x" then "s390x"
|
|
||||||
else toString stdenv.hostPlatform.parsed.cpu.family}"
|
|
||||||
"binary-format=${toString stdenv.hostPlatform.parsed.kernel.execFormat.name}"
|
|
||||||
"target-os=${toString stdenv.hostPlatform.parsed.kernel.name}"
|
|
||||||
|
|
||||||
# adapted from table in boost manual
|
|
||||||
# https://www.boost.org/doc/libs/1_66_0/libs/context/doc/html/context/architectures.html
|
|
||||||
"abi=${if stdenv.hostPlatform.parsed.cpu.family == "arm" then "aapcs"
|
|
||||||
else if stdenv.hostPlatform.isWindows then "ms"
|
|
||||||
else if stdenv.hostPlatform.isMips32 then "o32"
|
|
||||||
else if stdenv.hostPlatform.isMips64n64 then "n64"
|
|
||||||
else "sysv"}"
|
|
||||||
] ++ lib.optional (link != "static") "runtime-link=${runtime-link}"
|
|
||||||
++ lib.optional (variant == "release") "debug-symbols=off"
|
|
||||||
++ lib.optional (toolset != null) "toolset=${toolset}"
|
|
||||||
++ lib.optional (!enablePython) "--without-python"
|
|
||||||
++ lib.optional needUserConfig "--user-config=user-config.jam"
|
|
||||||
++ lib.optional (stdenv.buildPlatform.isDarwin && stdenv.hostPlatform.isLinux) "pch=off"
|
|
||||||
++ lib.optionals (stdenv.hostPlatform.libc == "msvcrt") [
|
|
||||||
"threadapi=win32"
|
|
||||||
] ++ extraB2Args
|
|
||||||
);
|
|
||||||
|
|
||||||
in
|
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
pname = "boost";
|
|
||||||
|
|
||||||
inherit src version;
|
|
||||||
|
|
||||||
patchFlags = [];
|
|
||||||
|
|
||||||
patches = patches
|
|
||||||
++ lib.optional stdenv.isDarwin ./darwin-no-system-python.patch
|
|
||||||
# Fix boost-context segmentation faults on ppc64 due to ABI violation
|
|
||||||
++ lib.optional (lib.versionOlder version "1.71") (fetchpatch {
|
|
||||||
url = "https://github.com/boostorg/context/commit/2354eca9b776a6739112833f64754108cc0d1dc5.patch";
|
|
||||||
sha256 = "067m4bjpmcanqvg28djax9a10avmdwhlpfx6gn73kbqqq70dnz29";
|
|
||||||
stripLen = 1;
|
|
||||||
extraPrefix = "libs/context/";
|
|
||||||
})
|
|
||||||
++ lib.optional (lib.versionOlder version "1.70") (fetchpatch {
|
|
||||||
# support for Mips64n64 appeared in boost-context 1.70
|
|
||||||
url = "https://github.com/boostorg/context/commit/e3f744a1862164062d579d1972272d67bdaa9c39.patch";
|
|
||||||
sha256 = "sha256-qjQy1b4jDsIRrI+UYtcguhvChrMbGWO0UlEzEJHYzRI=";
|
|
||||||
stripLen = 1;
|
|
||||||
extraPrefix = "libs/context/";
|
|
||||||
})
|
|
||||||
    ++ lib.optional (lib.versionAtLeast version "1.70" && lib.versionOlder version "1.73") ./cmake-paths.patch
    ++ lib.optional (lib.versionAtLeast version "1.73") ./cmake-paths-173.patch
    ++ lib.optional (version == "1.77.0") (fetchpatch {
      url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch";
      relative = "include";
      sha256 = "sha256-KlmIbixcds6GyKYt1fx5BxDIrU7msrgDdYo9Va/KJR4=";
    });

  meta = with lib; {
    homepage = "http://boost.org/";
    description = "Collection of C++ libraries";
    license = licenses.boost;
    platforms = platforms.unix ++ platforms.windows;
    badPlatforms = optionals (versionOlder version "1.73") platforms.riscv;
    maintainers = with maintainers; [ hjones2199 ];

    broken =
      # boost-context lacks support for the N32 ABI on mips64. The build
      # will succeed, but packages depending on boost-context will fail with
      # a very cryptic error message.
      stdenv.hostPlatform.isMips64n32;
  };

  passthru = {
    inherit boostBuildPatches;
  };

  preConfigure = lib.optionalString useMpi ''
    cat << EOF >> user-config.jam
    using mpi : ${mpi}/bin/mpiCC ;
    EOF
  ''
  # On darwin we need to add the `$out/lib` to the libraries' rpath explicitly,
  # otherwise the dynamic linker is unable to resolve the reference to @rpath
  # when the boost libraries want to load each other at runtime.
  + lib.optionalString (stdenv.isDarwin && enableShared) ''
    cat << EOF >> user-config.jam
    using clang-darwin : : ${stdenv.cc.targetPrefix}c++
      : <linkflags>"-rpath $out/lib/"
      ;
    EOF
  ''
  # b2 has trouble finding the correct compiler and tools for cross compilation
  # since it apparently ignores $CC, $AR etc. Thus we need to set everything
  # in user-config.jam. To keep things simple we just set everything in an
  # uniform way for clang and gcc (which works thanks to our cc-wrapper).
  # We pass toolset later which will make b2 invoke everything in the right
  # way -- the other toolset in user-config.jam will be ignored.
  + lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) ''
    cat << EOF >> user-config.jam
    using gcc : cross : ${stdenv.cc.targetPrefix}c++
      : <archiver>$AR
        <ranlib>$RANLIB
        ;

    using clang : cross : ${stdenv.cc.targetPrefix}c++
      : <archiver>$AR
        <ranlib>$RANLIB
        ;
    EOF
  ''
  # b2 needs to be explicitly told how to find Python when cross-compiling
  + lib.optionalString enablePython ''
    cat << EOF >> user-config.jam
    using python : : ${python.interpreter}
      : ${python}/include/python${python.pythonVersion}
      : ${python}/lib
      ;
    EOF
  '';

  NIX_CFLAGS_LINK = lib.optionalString stdenv.isDarwin
                      "-headerpad_max_install_names";

  enableParallelBuilding = true;

  nativeBuildInputs = [ which boost-build ]
    ++ lib.optional stdenv.hostPlatform.isDarwin fixDarwinDylibNames;
  buildInputs = [ expat zlib bzip2 libiconv ]
    ++ lib.optional (lib.versionAtLeast version "1.69") zstd
    ++ [ xz ]
    ++ lib.optional enableIcu icu
    ++ lib.optionals enablePython [ libxcrypt python ]
    ++ lib.optional enableNumpy python.pkgs.numpy;

  configureScript = "./bootstrap.sh";
  configurePlatforms = [];
  dontDisableStatic = true;
  dontAddStaticConfigureFlags = true;
  configureFlags = [
    "--includedir=$(dev)/include"
    "--libdir=$(out)/lib"
    "--with-bjam=b2" # prevent bootstrapping b2 in configurePhase
  ] ++ lib.optional (toolset != null) "--with-toolset=${toolset}"
    ++ [ (if enableIcu then "--with-icu=${icu.dev}" else "--without-icu") ];

  buildPhase = ''
    runHook preBuild
    b2 ${b2Args}
    runHook postBuild
  '';

  installPhase = ''
    runHook preInstall

    # boostbook is needed by some applications
    mkdir -p $dev/share/boostbook
    cp -a tools/boostbook/{xsl,dtd} $dev/share/boostbook/

    # Let boost install everything else
    b2 ${b2Args} install

    runHook postInstall
  '';

  postFixup = ''
    # Make boost header paths relative so that they are not runtime dependencies
    cd "$dev" && find include \( -name '*.hpp' -or -name '*.h' -or -name '*.ipp' \) \
      -exec sed '1s/^\xef\xbb\xbf//;1i#line 1 "{}"' -i '{}' \;
  '' + lib.optionalString (stdenv.hostPlatform.libc == "msvcrt") ''
    $RANLIB "$out/lib/"*.a
  '';

  outputs = [ "out" "dev" ];

  setOutputFlags = false;
}

@@ -1,15 +0,0 @@
Taken from https://github.com/conan-io/conan-center-index/pull/361/files

diff --git a/include/boost/thread/pthread/thread_data.hpp b/include/boost/thread/pthread/thread_data.hpp
index aefbeb4..bc9b136 100644
--- a/boost/thread/pthread/thread_data.hpp
+++ b/boost/thread/pthread/thread_data.hpp
@@ -57,7 +57,7 @@ namespace boost
 #else
           std::size_t page_size = ::sysconf( _SC_PAGESIZE);
 #endif
-#if PTHREAD_STACK_MIN > 0
+#ifdef PTHREAD_STACK_MIN
         if (size<PTHREAD_STACK_MIN) size=PTHREAD_STACK_MIN;
 #endif
         size = ((size+page_size-1)/page_size)*page_size;

@@ -1,24 +0,0 @@
{ lib, stdenv, fetchFromGitHub, cmake }:

stdenv.mkDerivation rec {
  pname = "libyaml-cpp";
  version = "0.6.2";

  src = fetchFromGitHub {
    owner = "jbeder";
    repo = "yaml-cpp";
    rev = "yaml-cpp-${version}";
    sha256 = "16lclpa487yghf9019wymj419wkyx4795wv9q7539hhimajw9kpb";
  };

  # implement https://github.com/jbeder/yaml-cpp/commit/52a1378e48e15d42a0b755af7146394c6eff998c
  postPatch = ''
    substituteInPlace CMakeLists.txt \
      --replace 'option(YAML_BUILD_SHARED_LIBS "Build Shared Libraries" OFF)' \
                'option(YAML_BUILD_SHARED_LIBS "Build yaml-cpp shared library" ''${BUILD_SHARED_LIBS})'
  '';

  nativeBuildInputs = [ cmake ];

  cmakeFlags = [ "-DBUILD_SHARED_LIBS=ON" "-DYAML_CPP_BUILD_TESTS=OFF" ];
}

@@ -1,43 +0,0 @@
# The program `nuke-refs' created by this derivation replaces all
# references to the Nix store in the specified files by a non-existant
# path (/nix/store/eeee...). This is useful for getting rid of
# dependencies that you know are not actually needed at runtime.

{ lib, stdenvNoCC, perl, signingUtils, shell ? stdenvNoCC.shell }:

let
  stdenv = stdenvNoCC;

  darwinCodeSign = stdenv.targetPlatform.isDarwin && stdenv.targetPlatform.isAarch64;
in

stdenvNoCC.mkDerivation {
  name = "nuke-references";

  strictDeps = true;
  enableParallelBuilding = true;
  dontUnpack = true;
  dontConfigure = true;
  dontBuild = true;

  installPhase = ''
    mkdir -p $out/bin
    substituteAll ${./nuke-refs.sh} $out/bin/nuke-refs
    chmod a+x $out/bin/nuke-refs
  '';

  postFixup = lib.optionalString darwinCodeSign ''
    mkdir -p $out/nix-support
    substituteAll ${./darwin-sign-fixup.sh} $out/nix-support/setup-hooks.sh
  '';

  # FIXME: get rid of perl dependency.
  env = {
    inherit perl;
    inherit (builtins) storeDir;
    shell = lib.getBin shell + (shell.shellPath or "");
    signingUtils = lib.optionalString darwinCodeSign signingUtils;
  };

  meta.mainProgram = "nuke-refs";
}

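As an aside, a minimal usage sketch of the resulting `nuke-refs` tool (the file path and the excluded store path below are made up for illustration):

```
# rewrite every store reference in the file to the dummy /nix/store/eeee... path
nuke-refs result/bin/some-tool

# keep (exclude) one reference that really is needed at runtime;
# the -e argument must be a store path, here a placeholder hash of 32 'a's
nuke-refs -e /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-glibc-2.39 result/bin/some-tool
```
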
@@ -1,34 +0,0 @@
#! @shell@

fixupHooks=()

if [ -e @out@/nix-support/setup-hooks.sh ]; then
    source @out@/nix-support/setup-hooks.sh
fi

excludes=""
while getopts e: o; do
    case "$o" in
        e) storeId=$(echo "$OPTARG" | @perl@/bin/perl -ne "print \"\$1\" if m|^\Q@storeDir@\E/([a-z0-9]{32})-.*|")
           if [ -z "$storeId" ]; then
               echo "-e argument must be a Nix store path"
               exit 1
           fi
           excludes="$excludes(?!$storeId)"
           ;;
    esac
done
shift $(($OPTIND-1))

for i in "$@"; do
    if test ! -L "$i" -a -f "$i"; then
        cat "$i" | @perl@/bin/perl -pe "s|\Q@storeDir@\E/$excludes[a-z0-9]{32}-|@storeDir@/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-|g" > "$i.tmp"
        if test -x "$i"; then chmod +x "$i.tmp"; fi
        cmp -s "$i" "$i.tmp" || echo "Changed file: \"$i\""
        mv "$i.tmp" "$i"

        for hook in "${fixupHooks[@]}"; do
            eval "$hook" "$i"
        done
    fi
done

@@ -1,24 +0,0 @@
# Setup hook for pip.
echo "Sourcing pip-install-hook"

declare -a pipInstallFlags

pipInstallPhase() {
    echo "Executing pipInstallPhase"
    runHook preInstall

    mkdir -p "$out/@pythonSitePackages@"
    export PYTHONPATH="$out/@pythonSitePackages@:$PYTHONPATH"

    pushd dist || return 1
    @pythonInterpreter@ -m pip install ./*.whl --no-warn-script-location --prefix="$out" --no-cache $pipInstallFlags
    popd || return 1

    runHook postInstall
    echo "Finished executing pipInstallPhase"
}

if [ -z "${dontUsePipInstall-}" ] && [ -z "${installPhase-}" ]; then
    echo "Using pipInstallPhase"
    installPhase=pipInstallPhase
fi

@@ -1,58 +0,0 @@
{
  lib,
  python3Packages,
  fetchFromGitHub,
  git,
  cmake,
}:

python3Packages.buildPythonApplication rec {
  pname = "tt-buda";
  version = "0.19.3";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "tenstorrent";
    repo = "tt-buda";
    rev = "v${version}";
    #hash = "sha256-g5eB2roVh4t4fhM+t2QYm+3NXYM94hbwstWES2sL6hA=";
    hash = "sha256-a+yamtu93AypLAXa9cj3yQ1AcizEBbmDd8fa2RNjGcQ=";
    fetchSubmodules = true;
    leaveDotGit = true;
  };

  build-system = [
    python3Packages.setuptools
    python3Packages.wheel
  ];

  nativeBuildInputs = [
    git
    cmake
  ] ++ python3Packages.pybind11.propagatedNativeBuildInputs;

  buildInputs = [
    python3Packages.python # pybind python.h
  ];

  postPatch = ''
    substituteInPlace compile_flags.txt third_party/budabackend/compile_flags.txt \
      --replace-fail "-I/usr/include/python3.8" "-I/usr/include/python3.8"
  '';

  dontUseCmakeConfigure = true;

  pythonImportsCheck = [
    "tt_buda"
  ];

  BACKEND_ARCH_NAME = "wormhole_b0";
  ARCH_NAME = "wormhole_b0";

  meta = {
    description = "Tenstorrent TT-BUDA Repository";
    homepage = "https://github.com/tenstorrent/tt-buda";
    license = lib.licenses.asl20;
    mainProgram = "tt-buda";
  };
}

@@ -8,13 +8,13 @@ with pkgs.python3Packages;

 buildPythonPackage rec {
   pname = "tt-flash";
-  version = "unstable-2024-09-27";
+  version = "3.1.1";

   src = pkgs.fetchFromGitHub {
     owner = "tenstorrent";
     repo = "tt-flash";
-    rev = "4002fee1da7edfcbf09093ba23612caeca071f23";
-    hash = "sha256-O6b/vS/zCjp/mrNzFEylWs0jtwdHY65nwkvn5GFridI=";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-t2B1XEOKBKxE2eQiS7pc+EemBWomMgocyk4oRDt0Q78=";
   };

   nativeBuildInputs = [ pythonRelaxDepsHook ];

@@ -4,10 +4,10 @@ let
   src = pkgs.fetchFromGitHub {
     owner = "tenstorrent";
     repo = "tt-kmd";
-    rev = "715a5d76e5dbb6d8972d4aa92e8cbe3434986b9f";
-    hash = "sha256-OCnGhvIDIqkQJXlIpOVnP0O9cA9J7/bz1JPAOpeDNYQ=";
+    rev = "refs/tags/ttkmd-${version}";
+    hash = "sha256-TTd+SXUQ/RwsZB7YIc0QsE9zHBCYO3NRrCub7/K1rP4=";
   };
-  version = "unstable-2024-09-06";
+  version = "1.31";
 in
 {
   kmd = pkgs.stdenv.mkDerivation {

@@ -41,6 +41,9 @@ in
   test = pkgs.stdenv.mkDerivation {
     pname = "tt-kmd-test";

+    # https://github.com/tenstorrent/tt-kmd/pull/37
+    patches = ./limits.patch;
+
     inherit src version;

     nativeBuildInputs = [ pkgs.gnumake ];

pkgs/kmd/limits.patch (new file)
@@ -0,0 +1,12 @@
diff --git a/test/query_mappings.cpp b/test/query_mappings.cpp
index 0721485..7fe7ae4 100644
--- a/test/query_mappings.cpp
+++ b/test/query_mappings.cpp
@@ -16,6 +16,7 @@
 #include <set>
 #include <string>
 #include <cstddef>
+#include <limits>
 #include <cstdint>

 #include <sys/ioctl.h>

File diff suppressed because it is too large
File diff suppressed because it is too large

@@ -1,16 +1,26 @@
 { pkgs }:

+let
+
+  # Upstream does not vendor a lock file, so one has to be created manually with
+  # `cargo generate-lockfile`.
+  # Use the version needed by tt-smi:
+  # https://github.com/tenstorrent/tt-smi/blob/main/pyproject.toml#L30
+  version = "0.4.9";
+
+  src = pkgs.fetchFromGitHub {
+    owner = "tenstorrent";
+    repo = "luwen";
+    rev = "refs/tags/v${version}";
+    sha256 = "sha256-K68PjccE2fBkU4RvKv8X6jKRPYqsVhKB6jU92aajLgo=";
+  };
+
+in
+
 {
   luwen = pkgs.rustPlatform.buildRustPackage rec {
     pname = "luwen";
-    version = "unstable-2024-09-13";
-
-    src = pkgs.fetchFromGitHub {
-      owner = "tenstorrent";
-      repo = "luwen";
-      rev = "e4e10e95928f4b73d31ac4f41ea08cd6e3ef5573";
-      sha256 = "sha256-cScaqWAyjDuvy9M2EccMfUHfDq23IWniaKeq+upHzOg=";
-    };
+    inherit version src;

     postPatch = ''
       ln -s ${./Cargo_0_2.lock} Cargo.lock

@@ -22,14 +32,7 @@

   pyluwen = pkgs.python3.pkgs.buildPythonPackage rec {
     pname = "pyluwen";
-    version = "unstable-2024-09-13";
-
-    src = pkgs.fetchFromGitHub {
-      owner = "tenstorrent";
-      repo = "luwen";
-      rev = "e4e10e95928f4b73d31ac4f41ea08cd6e3ef5573";
-      sha256 = "sha256-cScaqWAyjDuvy9M2EccMfUHfDq23IWniaKeq+upHzOg=";
-    };
+    inherit version src;

     postPatch = ''
       ln -s ${./Cargo_0_2.lock} Cargo.lock

@@ -52,39 +55,4 @@
       "pyluwen"
     ];
   };
-
-  pyluwen_0_1 = pkgs.python3.pkgs.buildPythonPackage rec {
-    pname = "pyluwen";
-    version = "v0.1.0";
-
-    src = pkgs.fetchFromGitHub {
-      owner = "tenstorrent";
-      repo = "luwen";
-      rev = "${version}";
-      sha256 = "sha256-MyOzm3dfEkL7MsVzV51DaO+Op3+QhUzsYCTDsvYsvpk=";
-    };
-
-    postPatch = ''
-      ln -s ${./Cargo_0_1.lock} Cargo.lock
-    '';
-
-    buildAndTestSubdir = "crates/pyluwen";
-
-    format = "pyproject";
-
-    cargoDeps = pkgs.rustPlatform.fetchCargoTarball {
-      inherit src postPatch;
-      name = "${pname}-${version}";
-      hash = "sha256-ZXcj/pzQ/tAROdmi2w+AWYBvLSEZFayizxw+BmNDj70=";
-    };
-
-    nativeBuildInputs = [
-      pkgs.rustPlatform.cargoSetupHook
-      pkgs.rustPlatform.maturinBuildHook
-    ];
-
-    pythonImportsCheck = [
-      "pyluwen"
-    ];
-  };
 }

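For reference, a sketch of how the pinned `Cargo_0_2.lock` could be regenerated when bumping the luwen version, per the comment above. This runs outside the Nix build (e.g. in a shell with git and cargo); the destination path in this repo is an assumption:

```
git clone https://github.com/tenstorrent/luwen
cd luwen
git checkout v0.4.9           # the version pinned above
cargo generate-lockfile       # writes Cargo.lock next to the workspace Cargo.toml
cp Cargo.lock ../pkgs/luwen/Cargo_0_2.lock   # destination path is an assumption
```
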
@@ -12,21 +12,23 @@
   sfpi,
   hwloc,
   libz,
+  runCommand,
+  libexecinfo,
+  callPackage,
 }:

 let
   llvmPackages = llvmPackages_17;
   depsDir = "deps";

-  version = "unstable-2024-10-04";
+  version = "0.53.0";

   metal-deps = metal.overrideAttrs (previousAttrs: {
     name = "tt-metal-deps-${version}.tar.gz";

     dontBuild = true;

-    outputHash = "sha256-UOBBqIP2KKEn2pfv7l5v2Of9RoZY0+3TCEu94MQUVYo=";
+    outputHash = "sha256-hhLjEssXID+uiPQ3kexMCOVB6DB9m/eAVmfr2OleGXc=";
     outputHashAlgo = "sha256";

     cmakeFlags = [
@@ -51,7 +53,9 @@ let
       runHook postInstall
     '';

+    postInstall = "";
   });

   # NOTE: When changing something remember to make sure the outputHash above doesn't change, or if it changes then update it.
   metal = llvmPackages.libcxxStdenv.mkDerivation {
     pname = "tt-metal";
@@ -59,12 +63,16 @@ let
     src = fetchFromGitHub {
       owner = "tenstorrent";
       repo = "tt-metal";
-      rev = "0fb4249a94a99714de8f91d93d338832694c09e0";
-      # this takes a while and we don't need all of them
-      fetchSubmodules = true;
-      hash = "sha256-0tcIwaJzM75S7SFKCJ2UbfElwASpFwdySmzt2LUTT4A=";
+      rev = "154e6993aed78213446c59731e41c3617d83c1f1";
+      hash = "sha256-edtlE4CVsTO4BW0PKhkN0IxdV666Tu/Y1jgZ2Exljeo=";
+      fetchSubmodules = true;
+      fetchLFS = true;
     };

+    patches = [
+      ./rpath.patch
+    ];
+
     env.NIX_CFLAGS_COMPILE = "-Wno-unused-command-line-argument";

     nativeBuildInputs = [
@@ -78,11 +86,9 @@ let

     buildInputs = [
       numactl

       # umd
       hwloc
       libz
     ];

     postUnpack = ''
@@ -92,28 +98,56 @@ let

     postPatch = ''
       cp ${cpm-cmake}/share/cpm/CPM.cmake cmake/CPM.cmake
-      rm -rf tt_metal/third_party/sfpi/compiler
-      ln -s ${sfpi.tt-gcc} tt_metal/third_party/sfpi/compiler
+      substituteInPlace tt_metal/CMakeLists.txt ttnn/CMakeLists.txt \
+        --replace-fail "REPLACETHIS\"" "$out/lib\"" \
+        --replace-warn "REPLACETHIS1" "$out/build/lib"
+
+      substituteInPlace tt_metal/hw/CMakeLists.txt \
+        --replace-fail "FetchContent_MakeAvailable(sfpi)" ""
+      mkdir -p runtime
+      ln -s ${sfpi.prebuilt} runtime/sfpi
     '';

+    ARCH_NAME = "wormhole_b0";
+
     preConfigure = ''
-      export ARCH_NAME=wormhole_b0
       export TT_METAL_HOME=$(pwd)
       export PYTHONPATH=$(pwd)
     '';

     cmakeFlags = [
       "-DCPM_SOURCE_CACHE=${depsDir}"
+      "-DBUILD_PROGRAMMING_EXAMPLES=TRUE"
     ];

     postInstall = ''
-      pwd
-      mkdir -p $out/lib
-      cp lib/{_ttnn.so,libtt_metal.so} $out/lib
+      # Have to do this until cpp-ttnn-project-template is fixed:
+      # ttnn-template> ninja: error: '/nix/store/-tt-metal-unstable-2024-10-04/build/lib/_ttnn.so', needed by 'sources/examples/sample_app/sample_app', missing and no known rule to make it
+      cp -r ../ $out
+      rm -rf $out/.cpmcache
+      ln -s $out/build/deps $out/.cpmcache
+
+      # Nix checks for references to /build/source, so these strings have to be changed,
+      # but kept the same length, to avoid corrupting the binaries.
+      find "$out" -type f -print0 | while IFS= read -r -d $'\0' f; do
+        sed -i "s|/build/source|/suild/source|g" "$f"
+        sed -i 's|$ORIGIN/build/lib:|$ORIGIN/suild/lib:|g' "$f"
+      done
+
+      # Using tt-metal tries to write to <storepath>/built.
+      # Create the built dir so a writable dir can be bind mounted onto it.
+      mkdir $out/built
+      mkdir $out/generated
     '';

+    dontPatchELF = true;
+    dontStrip = true;
+
     passthru = {
       inherit metal-deps;
+      tests = {
+        template = callPackage ./ttnn-template.nix { inherit metal; };
+      };
     };

   };

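The `postInstall` rewrite above only works because the replacement string keeps the byte length identical to the original, so offsets inside already-linked binaries don't shift. A quick sanity check of that invariant:

```
# both strings must have identical length, otherwise the in-place sed
# would corrupt ELF files by shifting their internal offsets
printf '%s\n' "/build/source" "/suild/source" | awk '{ print length($0), $0 }'
# 13 /build/source
# 13 /suild/source
```
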
pkgs/metal/rpath.patch (new file)
@@ -0,0 +1,28 @@
diff --git a/tt_metal/CMakeLists.txt b/tt_metal/CMakeLists.txt
index cba3743fbc..e580f6696c 100644
--- a/tt_metal/CMakeLists.txt
+++ b/tt_metal/CMakeLists.txt
@@ -55,7 +55,7 @@ set_target_properties(
     tt_metal
     PROPERTIES
         INSTALL_RPATH
-            "${PROJECT_BINARY_DIR}/lib"
+            "REPLACETHIS"
         ADDITIONAL_CLEAN_FILES
             "${PROJECT_BINARY_DIR}/lib;${PROJECT_BINARY_DIR}/obj"
 )
diff --git a/ttnn/CMakeLists.txt b/ttnn/CMakeLists.txt
index 45ee3dd8f8..4401f51b39 100644
--- a/ttnn/CMakeLists.txt
+++ b/ttnn/CMakeLists.txt
@@ -683,8 +683,8 @@ TT_ENABLE_UNITY_BUILD(ttnn)
 #We move the library binaries to a different path rather than PROJECT_BINARY_DIR
 #in the Python wheel
 set(TTNN_INSTALL_RPATH
-    "${PROJECT_BINARY_DIR}/lib"
-    "$ORIGIN/build/lib"
+    "REPLACETHIS"
+    "REPLACETHIS1"
 )

 #Make sure library built is _ttnn.so and that it can find all it's linked libraries

pkgs/metal/ttnn-template.nix (new file)
@@ -0,0 +1,119 @@
{
  stdenv,
  fetchFromGitHub,
  metal,
  cmake,
  ninja,
  git,
  cacert,
  python3,
  numactl,
  hwloc,
  libz,
  llvmPackages_17,
  cpm-cmake,
  sfpi,
}:

let
  version = "0";
  llvmPackages = llvmPackages_17;
  depsDir = "deps";

  ttnn-template-deps = ttnn-template.overrideAttrs (previousAttrs: {
    name = "ttnn-template-deps-deps-${version}.tar.gz";

    dontBuild = true;

    outputHash = "sha256-qt3PLKE3lwqiYQq6m06V1xk1qDOyHAtGj8lw0Q99qgE=";
    outputHashAlgo = "sha256";

    cmakeFlags = [
      "-DCPM_DOWNLOAD_ALL=ON"
      "-DCPM_SOURCE_CACHE=${depsDir}"
    ];

    # Infinite recursion
    postUnpack = "";

    installPhase = ''
      runHook preInstall

      # Prune the `.git` directories
      find ${depsDir} -name .git -type d -prune -exec rm -rf {} \;;
      # Build a reproducible tar, per instructions at https://reproducible-builds.org/docs/archives/
      tar --owner=0 --group=0 --numeric-owner --format=gnu \
          --sort=name --mtime="@$SOURCE_DATE_EPOCH" \
          -czf $out \
          ${depsDir}

      runHook postInstall
    '';

    postInstall = "";
  });

  ttnn-template = llvmPackages.libcxxStdenv.mkDerivation {
    pname = "ttnn-template";
    version = "0";
    src = fetchFromGitHub {
      owner = "tenstorrent";
      repo = "cpp-ttnn-project-template";
      rev = "702b453aa7000daa56692b7559ec77adf407828d";
      hash = "sha256-NzcZCVujJCyHQgALAewJjoWj+6bQXncONO7nRK8zSx4=";
    };

    nativeBuildInputs = [
      cmake
      #ninja
      python3
      # for cpm
      git
      cacert
    ];

    buildInputs = [
      numactl
      # umd
      hwloc
      libz
    ];

    ARCH_NAME = "wormhole_b0";
    TT_METAL_HOME = metal;

    postUnpack = ''
      mkdir -p $sourceRoot/build
      tar -xf ${ttnn-template-deps} -C $sourceRoot/build
    '';

    postPatch = ''
      cp ${cpm-cmake}/share/cpm/CPM.cmake cmake/CPM.cmake

      # Upstream changed these locations and removed libfmt, but the template hasn't been updated yet
      # https://github.com/tenstorrent/tt-metal/pull/13788
      substituteInPlace sources/sample_lib/CMakeLists.txt \
        --replace-fail '$ENV{TT_METAL_HOME}/build/lib/_ttnn.so' '${metal}/lib/_ttnn.so' \
        --replace-fail '$ENV{TT_METAL_HOME}/build/lib/libdevice.so' '${metal}/lib/libdevice.so' \
        --replace-fail '$ENV{TT_METAL_HOME}/build/lib/libfmt.so' "" \
        --replace-fail '$ENV{TT_METAL_HOME}/build/lib/libnng.so.1' '${metal}/lib/libnng.so.1.8.0' \

      substituteInPlace sources/sample_lib/CMakeLists.txt \
        --replace-fail '$ENV{TT_METAL_HOME}/build/lib' '${metal}/lib ${metal}/build/lib'
    '';

    # No default install target
    installPhase = ''
      runHook preInstall
      pwd
      install -D sources/examples/sample_app/sample_app $out/bin/sample_app
      runHook postInstall
    '';

    cmakeFlags = [
      "-DCPM_SOURCE_CACHE=${depsDir}"
    ];
  };
in
ttnn-template

@@ -27,5 +27,32 @@ rec {
     '';
   };

+  prebuilt = pkgs.stdenv.mkDerivation rec {
+    pname = "tt-gcc";
+    version = "5.0.0";
+
+    src = pkgs.fetchzip {
+      url = "https://github.com/tenstorrent/sfpi/releases/download/v5.0.0/sfpi-release.tgz";
+      hash = "sha256-RBhJ6BWmvB06zWoELTumpzroHDMpNXU0/WC6elgAkW0=";
+    };
+
+    nativeBuildInputs = with pkgs; [
+      autoPatchelfHook
+    ];
+
+    buildInputs = with pkgs; [
+      libmpc
+      mpfr
+      gmp
+      zlib
+      expat
+    ];
+
+    installPhase = ''
+      cp -r . $out
+    '';
+
+  };
+
   tt-gcc = import ./tt-gcc.nix { inherit pkgs; };
 }

@@ -8,13 +8,13 @@ with pkgs.python3Packages;

 buildPythonPackage rec {
   pname = "tt-smi";
-  version = "unstable-2024-09-27";
+  version = "3.0.5";

   src = pkgs.fetchFromGitHub {
     owner = "tenstorrent";
     repo = "tt-smi";
-    rev = "052f1ce49b94581710744a91939121e01c24b5f2";
-    hash = "sha256-IA60unZpSWVnMnDjDIC31QtURi9nIr/F7s7PGZilPcw=";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-+Dw6F9aupe4VTWQFiNWGKMDOTmxwCW2bHuDQxWxluUc=";
   };

   format = "pyproject";
@@ -39,7 +39,6 @@ buildPythonPackage rec {
   propagatedBuildInputs = [
     setuptools
     requests
-    textual
     black
     distro
     elasticsearch
@@ -51,5 +50,14 @@ buildPythonPackage rec {
     importlib-resources
     pkgs.pre-commit
     tools-common
+    tools-common.textual
   ];

+  dontUsePytestCheck = true; # no tests
+
+  installCheckPhase = ''
+    output=$($out/bin/tt-smi || true)
+    echo "tt-smi output: $output"
+    echo $output | grep -q "No Tenstorrent driver detected"
+  '';
 }

@@ -4,7 +4,9 @@
   fetchFromGitHub,
   makeWrapper,
   bash,
+  coreutils,
   pciutils,
+  gawk,
 }:

 # NOTE: We might not use these files if we end up doing what they do directly in Nix instead.
@@ -31,7 +33,8 @@ stdenv.mkDerivation rec {
     runHook preInstall

     install -Dm444 -t $out/lib/systemd/system/ "tenstorrent-hugepages.service"
-    install -Dm444 -t $out/lib/systemd/system/ 'dev-hugepages\x2d1G.mount'
+    # Defined in the module
+    #install -Dm444 -t $out/lib/systemd/system/ 'dev-hugepages\x2d1G.mount'
     install -Dm555 -t $out/libexec/ "hugepages-setup.sh"

     runHook postInstall
@@ -41,12 +44,17 @@ stdenv.mkDerivation rec {
     substituteInPlace "$out/lib/systemd/system/tenstorrent-hugepages.service" \
       --replace-fail "/opt/tenstorrent/bin/hugepages-setup.sh" "$out/libexec/hugepages-setup.sh"

-    wrapProgram "$out/libexec/hugepages-setup.sh" \
+    mv "$out/libexec/hugepages-setup.sh" "$out/libexec/.hugepages-setup.sh-wrapped"
+    makeWrapper ${bash}/bin/bash "$out/libexec/hugepages-setup.sh" \
       --prefix PATH : ${
         lib.makeBinPath [
-          pciutils # for lspci
+          coreutils
+          pciutils
+          gawk
        ]
-      }
+      } \
+      --add-flags "-x $out/libexec/.hugepages-setup.sh-wrapped"
+      # -x makes debugging easier
   '';

   meta = {

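Roughly what the generated wrapper ends up looking like, a sketch only: makeWrapper writes the real script, and the /nix/store/... paths below are placeholders, not actual hashes.

```
#!/nix/store/...-bash/bin/bash
# illustrative only: prefix PATH with coreutils, pciutils and gawk,
# then run the hidden original script under `bash -x` for easier debugging
PATH=/nix/store/...-coreutils/bin:/nix/store/...-pciutils/bin:/nix/store/...-gawk/bin${PATH:+:$PATH}
export PATH
exec /nix/store/...-bash/bin/bash -x /nix/store/...-tt-system-tools/libexec/.hugepages-setup.sh-wrapped "$@"
```
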
@@ -1,21 +1,37 @@
-{ pkgs }:
+{ fetchFromGitHub, python3Packages }:

-with pkgs.python3Packages;
+let
+  # Can be unpinned once
+  # https://github.com/tenstorrent/tt-tools-common/blob/main/pyproject.toml#L32
+  # is at v1
+  textual_0_82 = python3Packages.textual.overridePythonAttrs (old: rec {
+    version = "0.82.0";
+    src = fetchFromGitHub {
+      owner = "Textualize";
+      repo = "textual";
+      rev = "refs/tags/v${version}";
+      hash = "sha256-belpoXQ+CkTchK+FjI/Ur8v4cNgzX39xLdNfPCwaU6E=";
+    };
+    disabledTests = old.disabledTests ++ [
+      "test_selection"
+    ];
+  });
+in

-buildPythonPackage rec {
+python3Packages.buildPythonPackage rec {
   pname = "tools-common";
-  version = "unstable-2024-09-27";
+  # https://github.com/tenstorrent/tt-smi/blob/main/pyproject.toml#L31
+  version = "1.4.11";

-  src = pkgs.fetchFromGitHub {
+  src = fetchFromGitHub {
     owner = "tenstorrent";
     repo = "tt-tools-common";
-    rev = "a89b2db6d086698ab0351a820ea689b4809429a3";
-    sha256 = "sha256-xeiJQkWsg9p8re2XJai0mNWuP7LwJ9faj3+Z3U/KvzI=";
+    rev = "refs/tags/v${version}";
+    sha256 = "sha256-Q5GpT6B3pamY6bUjPbvNJ11npiR4q/6QMjRxovQ/MZ0=";
   };

   format = "pyproject";

-  nativeBuildInputs = [ pythonRelaxDepsHook ];
+  nativeBuildInputs = with python3Packages; [ pythonRelaxDepsHook ];

   pythonRelaxDeps = [
     "distro"
@@ -28,14 +44,14 @@ buildPythonPackage rec {
     "tqdm"
   ];

-  propagatedBuildInputs = [
+  propagatedBuildInputs = with python3Packages; [
     setuptools
     distro
     elasticsearch
     psutil
     pyyaml
     rich
-    textual
+    textual_0_82
     requests
     jsons
     tqdm
@@ -45,4 +61,8 @@ buildPythonPackage rec {
   pythonImportsCheck = [
     "tt_tools_common"
   ];
+
+  passthru = {
+    textual = textual_0_82;
+  };
 }
