Mirror of https://github.com/ggerganov/llama.cpp.git (synced 2024-12-24 02:14:35 +00:00)
flake.nix: rocm not yet supported on aarch64, so hide the output
commit a5c088d8c6
parent 1e3900ebac
flake.nix
@@ -74,6 +74,7 @@
 {
   config,
   lib,
+  system,
   pkgs,
   pkgsCuda,
   pkgsRocm,
@@ -103,10 +104,12 @@
   // lib.optionalAttrs pkgs.stdenv.isLinux {
     opencl = config.packages.default.override { useOpenCL = true; };
     cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp;
-    rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp;

     mpi-cpu = config.packages.default.override { useMpi = true; };
     mpi-cuda = config.packages.default.override { useMpi = true; };
-  };
+  }
+  // lib.optionalAttrs (system == "x86_64-linux") {
+    rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp;
+  };
 };
 };
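For context, a minimal sketch of the pattern used above (not part of the commit; the package values and the mkPackages helper are illustrative placeholders): lib.optionalAttrs returns its attribute set only when the condition holds and an empty set otherwise, so merging it with // means the rocm output simply does not exist on aarch64 systems instead of evaluating to a broken derivation.

# Minimal sketch, assuming placeholder string values instead of the real
# llama.cpp packages. lib.optionalAttrs cond attrs yields attrs when cond
# is true and {} otherwise, so the // merge adds rocm only on x86_64-linux.
let
  lib = import <nixpkgs/lib>;
  mkPackages = system:
    {
      default = "llama-cpp";
    }
    // lib.optionalAttrs (system == "x86_64-linux") {
      rocm = "llama-cpp-rocm";
    };
in {
  x86 = mkPackages "x86_64-linux";   # { default = "llama-cpp"; rocm = "llama-cpp-rocm"; }
  arm = mkPackages "aarch64-linux";  # { default = "llama-cpp"; }  rocm output is hidden
}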