nix: init singularity and docker images (#5056)
Exposes a few attributes demonstrating how to build [singularity](https://docs.sylabs.io/guides/latest/user-guide/)/[apptainer](https://apptainer.org/) and Docker images reusing llama.cpp's Nix expression. Built locally on `x86_64-linux` with `nix build github:someoneserge/llama.cpp/feat/nix/images#llamaPackages.{docker,docker-min,sif,llama-cpp}`, and the builds are fast and effective.
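Unrolled (the shell's brace expansion in the one-liner above produces four installables), the same builds against a local checkout would look like this sketch; the `.` flake ref and the `./result` symlink are standard `nix build` behaviour, and the attribute names come from this commit:

```sh
# Interactive Docker image: llama-cpp plus coreutils, /bin/sh, CA certificates.
nix build .#llamaPackages.docker

# Minimal Docker image: the same derivation built with `interactive = false`.
nix build .#llamaPackages.docker-min

# Singularity/apptainer image.
nix build .#llamaPackages.sif

# The plain package that the images wrap.
nix build .#llamaPackages.llama-cpp
```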
parent 5a9e2f60ba
commit 201294ae17
.devops/nix/docker.nix (new file, 37 lines)
@@ -0,0 +1,37 @@
```nix
{
  lib,
  dockerTools,
  buildEnv,
  llama-cpp,
  interactive ? true,
  coreutils,
}:

# A tar that can be fed into `docker load`:
#
# $ nix build .#llamaPackages.docker
# $ docker load < result

# For details and variations cf.
# - https://nixos.org/manual/nixpkgs/unstable/#ssec-pkgs-dockerTools-buildLayeredImage
# - https://discourse.nixos.org/t/a-faster-dockertools-buildimage-prototype/16922
# - https://nixery.dev/

# Approximate (compressed) sizes, at the time of writing, are:
#
# .#llamaPackages.docker: 125M;
# .#llamaPackagesCuda.docker: 537M;
# .#legacyPackages.aarch64-linux.llamaPackagesXavier.docker: 415M.

dockerTools.buildLayeredImage {
  name = llama-cpp.pname;
  tag = "latest";

  contents =
    [ llama-cpp ]
    ++ lib.optionals interactive [
      coreutils
      dockerTools.binSh
      dockerTools.caCertificates
    ];
}
```
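A minimal load-and-run sketch, assuming `llama-cpp.pname` evaluates to `llama-cpp` (hence the `llama-cpp:latest` tag below); `/bin/sh` exists only in the interactive variant, where it is supplied by `dockerTools.binSh`:

```sh
nix build .#llamaPackages.docker
docker load < result

# Enter the image; /bin/sh is provided by dockerTools.binSh in the
# interactive (default) variant only -- docker-min has no shell.
docker run --rm -it llama-cpp:latest /bin/sh
```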
.devops/nix/scope.nix (3 additions)
```diff
@@ -12,5 +12,8 @@ lib.makeScope newScope (
   self: {
     inherit llamaVersion;
     llama-cpp = self.callPackage ./package.nix { };
+    docker = self.callPackage ./docker.nix { };
+    docker-min = self.callPackage ./docker.nix { interactive = false; };
+    sif = self.callPackage ./sif.nix { };
   }
 )
```
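Because each backend instantiates this same scope, the new attributes also exist under the CUDA and Jetson scopes quoted in docker.nix's size table; for example:

```sh
# CUDA-enabled image (~537M compressed at the time of writing).
nix build .#llamaPackagesCuda.docker

# Board-specific scope, addressed through the full legacyPackages path
# (cross-system: building this on x86_64 needs an aarch64 builder).
nix build .#legacyPackages.aarch64-linux.llamaPackagesXavier.docker
```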
.devops/nix/sif.nix (new file, 27 lines)
@@ -0,0 +1,27 @@
```nix
{
  lib,
  singularity-tools,
  llama-cpp,
  bashInteractive,
  interactive ? false,
}:

let
  optionalInt = cond: x: if cond then x else 0;
in
singularity-tools.buildImage rec {
  inherit (llama-cpp) name;
  contents = [ llama-cpp ] ++ lib.optionals interactive [ bashInteractive ];

  # These are excessive (but safe) for most variants. Building singularity
  # images requires superuser privileges, so we build them inside a VM in a
  # writable image of pre-determined size.
  #
  # ROCm is currently affected by https://github.com/NixOS/nixpkgs/issues/276846
  #
  # Expected image sizes:
  # - cpu/blas: 150M,
  # - cuda, all gencodes: 560M,
  diskSize = 4096 + optionalInt llama-cpp.useRocm 16384;
  memSize = diskSize;
}
```
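A hedged usage sketch for the resulting image file: `apptainer exec` (or `singularity exec`) is the standard entry point, and since the default build is non-interactive there is no shell inside unless built with `interactive = true`. The `llama` binary name below is an assumption; the commit does not list which executables the llama-cpp package installs.

```sh
nix build .#llamaPackages.sif

# Run a binary from inside the image. The name `llama` is an assumption;
# inspect the llama-cpp package's bin/ directory for the real entry points.
apptainer exec ./result llama --help
```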