Mirror of https://github.com/ggerganov/llama.cpp.git, synced 2024-11-11 21:39:52 +00:00. Commit 201294ae17:
Exposes a few attributes demonstrating how to build [singularity](https://docs.sylabs.io/guides/latest/user-guide/)/[apptainer](https://apptainer.org/) and Docker images reusing llama.cpp's Nix expression. Built locally on `x86_64-linux` with `nix build github:someoneserge/llama.cpp/feat/nix/images#llamaPackages.{docker,docker-min,sif,llama-cpp}`, and it's fast and effective.
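For context, here is a minimal sketch of pulling those same attributes out of the flake from a plain Nix expression. The `legacyPackages.x86_64-linux.llamaPackages` output path is an assumption inferred from the `nix build` invocation above, not something this file guarantees.

```nix
# Hedged sketch: consume the image attributes from the flake in plain Nix.
# Requires the `flakes` experimental feature; the output path
# `legacyPackages.x86_64-linux.llamaPackages` is assumed from the build
# command above.
let
  flake = builtins.getFlake "github:someoneserge/llama.cpp/feat/nix/images";
  llamaPackages = flake.legacyPackages.x86_64-linux.llamaPackages;
in
{
  # The same four attributes named in the `nix build` invocation.
  inherit (llamaPackages) llama-cpp docker docker-min sif;
}
```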
```nix
{
  lib,
  newScope,
  llamaVersion ? "0.0.0",
}:

# We're using `makeScope` instead of just writing out an attrset
# because it allows users to apply overlays later using `overrideScope'`.
# Cf. https://noogle.dev/f/lib/makeScope

lib.makeScope newScope (
  self: {
    inherit llamaVersion;
    llama-cpp = self.callPackage ./package.nix { };
    docker = self.callPackage ./docker.nix { };
    docker-min = self.callPackage ./docker.nix { interactive = false; };
    sif = self.callPackage ./sif.nix { };
  }
)
```
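Because the scope is built with `lib.makeScope`, downstream users can swap out one member and have the image derivations pick up the change, which is the point of the `overrideScope'` comment above. A minimal sketch, assuming this expression is saved as `scope.nix` next to `package.nix`/`docker.nix`/`sif.nix` and instantiated with `callPackage`; the `doCheck` tweak is purely illustrative.

```nix
# Hedged sketch: instantiate the scope and override one member via
# `overrideScope'`. The file name `scope.nix` and the `doCheck` tweak are
# assumptions for illustration only.
let
  pkgs = import <nixpkgs> { };

  # `callPackage` supplies `lib` and `newScope` from nixpkgs.
  llamaPackages = pkgs.callPackage ./scope.nix { llamaVersion = "0.0.0"; };

  # Override llama-cpp; docker/docker-min/sif are wired through the scope's
  # fixed point, so they rebuild against the overridden package.
  patched = llamaPackages.overrideScope' (
    final: prev: {
      llama-cpp = prev.llama-cpp.overrideAttrs (old: {
        doCheck = false; # illustrative only
      });
    }
  );
in
patched.docker
```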