llama.cpp/scripts/hf.sh


#!/bin/bash
#
# Shortcut for downloading HF models
#
# Usage:
#   ./llama-cli -m $(./scripts/hf.sh https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/resolve/main/mixtral-8x7b-v0.1.Q4_K_M.gguf)
#   ./llama-cli -m $(./scripts/hf.sh --url https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/blob/main/mixtral-8x7b-v0.1.Q4_K_M.gguf)
#   ./llama-cli -m $(./scripts/hf.sh --repo TheBloke/Mixtral-8x7B-v0.1-GGUF --file mixtral-8x7b-v0.1.Q4_K_M.gguf)
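#
# the file is saved to the current directory unless --outdir is given; a sketch
# using the same model as above (the ./models path is just an illustration):
#
#   ./scripts/hf.sh --repo TheBloke/Mixtral-8x7B-v0.1-GGUF --file mixtral-8x7b-v0.1.Q4_K_M.gguf --outdir ./models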
#
# all logs go to stderr
function log {
    echo "$@" 1>&2
}

function usage {
    log "Usage: $0 [[--url] <url>] [--repo <repo>] [--file <file>] [--outdir <dir>] [-h|--help]"
    exit 1
}

# check for curl or wget
function has_cmd {
    if ! [ -x "$(command -v "$1")" ]; then
        return 1
    fi
}

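# pick a download command template; the %s placeholders are filled in later
# via printf with the output directory, the file basename, and the URL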
if has_cmd wget; then
    cmd="wget -q --show-progress -c -O %s/%s %s"
elif has_cmd curl; then
    cmd="curl -C - -f --output-dir %s -o %s -L %s"
else
    log "[E] curl or wget not found"
    exit 1
fi

url=""
repo=""
file=""
outdir="."
# parse args
while [[ $# -gt 0 ]]; do
    case "$1" in
        --url)
            url="$2"
            shift 2
            ;;
        --repo)
            repo="$2"
            shift 2
            ;;
        --file)
            file="$2"
            shift 2
            ;;
        --outdir)
            outdir="$2"
            shift 2
            ;;
        -h|--help)
            usage
            ;;
        *)
            url="$1"
            shift
            ;;
    esac
done

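# a bare positional argument is treated as the URL; if both --repo and --file
# are given, they take precedence and the URL is rebuilt from them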
if [ -n "$repo" ] && [ -n "$file" ]; then
url="https://huggingface.co/$repo/resolve/main/$file"
fi
if [ -z "$url" ]; then
log "[E] missing --url"
usage
fi
# check if the URL is a HuggingFace model, and if so, try to download it
is_url=false
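# ("https://huggingface.co" is exactly 22 characters, so a valid model URL must be longer)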
if [[ ${#url} -gt 22 ]]; then
    if [[ ${url:0:22} == "https://huggingface.co" ]]; then
        is_url=true
    fi
fi

if [ "$is_url" = false ]; then
log "[E] invalid URL, must start with https://huggingface.co"
exit 0
fi
# replace "blob/main" with "resolve/main"
url=${url/blob\/main/resolve\/main}
basename=$(basename "$url")
log "[+] attempting to download $basename"
if [ -n "$cmd" ]; then
cmd=$(printf "$cmd" "$outdir" "$basename" "$url")
log "[+] $cmd"
if $cmd; then
echo $outdir/$basename
exit 0
fi
fi
log "[-] failed to download"
exit 1