nix: ci: dont test cuda and rocm (for now)
Until https://github.com/ggerganov/llama.cpp/issues/6346 is resolved
commit 53c7ec53d5
parent e5b89a441a
flake.nix: 11 changed lines (+8, -3)
@@ -168,9 +168,14 @@
         };
 
         # Packages exposed in `.#checks` will be built by the CI and by
-        # `nix flake check`. Currently we expose all packages, but we could
-        # make more granular choices
-        checks = config.packages;
+        # `nix flake check`.
+        #
+        # We could test all outputs e.g. as `checks = confg.packages`.
+        #
+        # TODO: Build more once https://github.com/ggerganov/llama.cpp/issues/6346 has been addressed
+        checks = {
+          inherit (config.packages) default vulkan;
+        };
       };
     };
 }
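
Once the linked issue is resolved, the TODO in the new comment can be addressed by extending the `checks` attribute set again. A minimal sketch, not part of this commit, assuming the flake keeps exposing packages named `cuda` and `rocm` (implied by the commit title but not shown in this hunk):

        # Hypothetical follow-up once https://github.com/ggerganov/llama.cpp/issues/6346 is resolved:
        # build the CUDA and ROCm packages in CI again, alongside default and vulkan.
        checks = {
          inherit (config.packages) default vulkan cuda rocm;
        };

As committed, `nix flake check` and the Nix CI job (which build whatever is exposed in `.#checks`) now build only the `default` and `vulkan` packages.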