mirror of https://github.com/ggerganov/llama.cpp.git, synced 2024-12-27 03:44:35 +00:00, commit 9f40989351
55 lines | 1.3 KiB | CMake
# dependencies

find_package(Threads REQUIRED)

# third-party

# ...

# examples

include_directories(${CMAKE_CURRENT_SOURCE_DIR})

if (EMSCRIPTEN)
else()
    add_subdirectory(cvector-generator)
    add_subdirectory(batched-bench)
    add_subdirectory(batched)
    add_subdirectory(convert-llama2c-to-ggml)
    add_subdirectory(embedding)
    add_subdirectory(eval-callback)
    add_subdirectory(export-lora)
    add_subdirectory(gbnf-validator)
    add_subdirectory(gguf-hash)
    add_subdirectory(gguf-split)
    add_subdirectory(gguf)
    add_subdirectory(gritlm)
    add_subdirectory(imatrix)
    add_subdirectory(infill)
    add_subdirectory(llama-bench)
    add_subdirectory(llava)
    add_subdirectory(lookahead)
    add_subdirectory(lookup)
    add_subdirectory(main)
    add_subdirectory(parallel)
    add_subdirectory(passkey)
    add_subdirectory(perplexity)
    add_subdirectory(quantize-stats)
    add_subdirectory(quantize)
    add_subdirectory(retrieval)
    if (GGML_RPC)
        add_subdirectory(rpc)
    endif()
    if (LLAMA_BUILD_SERVER)
        add_subdirectory(server)
    endif()
    if (GGML_SYCL)
        add_subdirectory(sycl)
    endif()
    add_subdirectory(save-load-state)
    add_subdirectory(simple)
    add_subdirectory(simple-chat)
    add_subdirectory(speculative)
    add_subdirectory(tokenize)
endif()
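
Each add_subdirectory() call above pulls in a self-contained example with its own CMakeLists.txt that defines one executable target. As a rough sketch (not taken from this file), such a per-example CMakeLists.txt might look like the following; the target name llama-simple, the source file simple.cpp, and the common/llama link targets are illustrative assumptions:

    # Sketch only: a hypothetical per-example CMakeLists.txt (names are illustrative)
    set(TARGET llama-simple)
    add_executable(${TARGET} simple.cpp)
    install(TARGETS ${TARGET} RUNTIME)
    # link against the assumed common helper library, the core llama library, and Threads
    target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT})
    target_compile_features(${TARGET} PRIVATE cxx_std_11)

Note that the GGML_RPC, LLAMA_BUILD_SERVER, and GGML_SYCL options gate the rpc, server, and sycl examples at configure time, so those subdirectories are only built when the corresponding feature is enabled.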