gguf : make gguf pip-installable
* gitignore : add dist and rm pyproject.toml
* gguf: prepare as Pip package
* gguf: prepare as Pip package
* gguf : fix line endings
* requirements : add gguf
* gguf : update readme with build notes
* gguf : update readme with build notes
* gguf : add notes for tests
This commit is contained in:
parent b91ad7f461
commit 87e3733f24
.gitignore (vendored, 2 changes)
@@ -60,6 +60,7 @@ compile_commands.json
 CMakeSettings.json
 
 __pycache__
+dist
 
 zig-out/
 zig-cache/
@@ -70,7 +71,6 @@ perf-*.txt
 
 examples/jeopardy/results.txt
 
-pyproject.toml
 poetry.lock
 poetry.toml
 
gguf-py/LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2023 Georgi Gerganov

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
gguf-py/README.md (new file, 55 lines)
@@ -0,0 +1,55 @@
## gguf

This is a Python package for writing binary files in the [GGUF](https://github.com/ggerganov/ggml/pull/302)
(GGML Universal File) format.

See [convert-llama-hf-to-gguf.py](https://github.com/ggerganov/llama.cpp/blob/master/convert-llama-hf-to-gguf.py)
as an example of its usage.

## Installation

```sh
pip install gguf
```

## Development

Maintainers who participate in development of this package are advised to install it in editable mode:

```sh
cd /path/to/llama.cpp/gguf-py

pip install --editable .
```

**Note**: This may require you to upgrade your Pip installation, with a message saying that editable installation currently requires `setup.py`.
In this case, upgrade Pip to the latest version:

```sh
pip install --upgrade pip
```

## Publishing

To publish the package, you need to have `twine` and `build` installed:

```sh
pip install build twine
```

Then, follow these steps to release a new version:

1. Update the version in `pyproject.toml`.
2. Build the package:

```sh
python -m build
```

3. Upload the generated distribution archives:

```sh
python -m twine upload dist/*
```

## TODO
- [ ] Add tests
- [ ] Include conversion scripts as command-line entry points in this package.
- [ ] Add CI workflow for releasing the package.
gguf-py/gguf/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
from .gguf import GGUFWriter
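The package's public entry point is the `GGUFWriter` re-exported above. As a rough illustration (not part of this commit) of how a caller drives it, the sketch below writes a tiny GGUF file. It assumes the interface of later published `gguf` releases — a path-and-architecture constructor, `add_*` metadata setters, and three `write_*_to_file` passes — so the exact method names in the 0.2.0 module introduced here may differ.

```python
# Illustrative sketch, assuming the GGUFWriter interface of later published
# gguf releases; the 0.2.0 API introduced in this commit may differ.
import numpy as np
import gguf

writer = gguf.GGUFWriter("example.gguf", "llama")  # output path, architecture name
writer.add_block_count(1)                          # example key/value metadata
writer.add_tensor("tensor_0", np.ones((32,), dtype=np.float32))

# A GGUF file is written in passes: header, key/value metadata, tensor data.
writer.write_header_to_file()
writer.write_kv_data_to_file()
writer.write_tensors_to_file()
writer.close()
```

The split into header, key/value metadata, and tensor-data passes mirrors the on-disk layout of a GGUF file.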
gguf.py → gguf-py/gguf/gguf.py (renamed; executable file → normal file, 0 changes)
gguf-py/pyproject.toml (new file, 28 lines)
@@ -0,0 +1,28 @@
[tool.poetry]
name = "gguf"
version = "0.2.0"
description = "Write ML models in GGUF for GGML"
authors = ["GGML <ggml@ggml.ai>"]
packages = [
    {include = "gguf"},
]
readme = "README.md"
homepage = "https://ggml.ai"
repository = "https://github.com/ggerganov/llama.cpp"
keywords = ["ggml", "gguf", "llama.cpp"]
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]

[tool.poetry.dependencies]
python = ">=3.8"
numpy = ">=1.17"

[tool.poetry.dev-dependencies]
pytest = "^5.2"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
gguf-py/tests/test_gguf.py (new file, 7 lines)
@@ -0,0 +1,7 @@
import gguf

# TODO: add tests


def test_write_gguf():
    pass
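The test module is only a stub for now; the commit notes mention that tests are still to be added. As a hedged sketch of what `test_write_gguf` could eventually check — again assuming the `GGUFWriter` interface of later `gguf` releases and pytest's built-in `tmp_path` fixture — one option is to write a small file and assert the GGUF magic bytes:

```python
# Hedged sketch of a possible test, assuming the GGUFWriter interface of later
# gguf releases; pytest's tmp_path fixture supplies a temporary directory.
import numpy as np
import gguf


def test_write_gguf(tmp_path):
    path = tmp_path / "test.gguf"

    writer = gguf.GGUFWriter(str(path), "llama")
    writer.add_block_count(1)
    writer.add_tensor("tensor_0", np.zeros((16,), dtype=np.float32))
    writer.write_header_to_file()
    writer.write_kv_data_to_file()
    writer.write_tensors_to_file()
    writer.close()

    # Every GGUF file starts with the 4-byte magic "GGUF".
    assert path.read_bytes()[:4] == b"GGUF"
```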
requirements.txt (1 change)
@@ -1,2 +1,3 @@
 numpy==1.24
 sentencepiece==0.1.98
+gguf>=0.1.0