mirror of https://github.com/ggerganov/llama.cpp.git
gguf : first API pass
parent 7e82d25f40
commit 6873148771
ggml.c (69 lines changed)
@@ -18297,6 +18297,75 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct gguf_string {
+    uint32_t n;
+    char * data;
+};
+
+union gguf_value;
+
+union gguf_value {
+    uint8_t  uint8;
+    int8_t   int8;
+    uint16_t uint16;
+    int16_t  int16;
+    uint32_t uint32;
+    int32_t  int32;
+    float    float32;
+    bool     bool_;
+
+    struct gguf_string str;
+
+    struct {
+        enum gguf_type type;
+
+        uint32_t n;
+        union gguf_value * arr;
+    } arr;
+};
+
+struct gguf_kv {
+    struct gguf_string key;
+
+    uint32_t n_bytes; // TODO: is this actually needed?
+
+    enum gguf_type type;
+    union gguf_value value;
+};
+
+struct gguf_header {
+    uint32_t magic;
+    uint32_t version;
+    uint32_t n_tensors;
+
+    uint32_t n_kv;
+    struct gguf_kv * kv;
+};
+
+struct gguf_tensor_info {
+    struct gguf_string name;
+
+    uint32_t n_dims;
+    uint32_t ne[GGML_MAX_DIMS];
+    uint32_t n_elements; // TODO: is this needed?
+
+    enum ggml_type type;
+
+    uint64_t offset; // must be a multiple of `ALIGNMENT`.
+};
+
+struct gguf_context {
+    struct gguf_header header;
+    struct gguf_tensor_info * infos;
+
+    size_t alignment;
+
+    uint8_t * padding;
+    uint8_t * data;
+};

 ////////////////////////////////////////////////////////////////////////////////

 int ggml_cpu_has_avx(void) {
 #if defined(__AVX__)
     return 1;
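These structs appear to mirror the on-disk layout of a GGUF file: a fixed header, the key-value metadata, the tensor infos, and then the aligned tensor data. The following is a hypothetical stand-alone reader sketch, not part of this commit; the field order and little-endian uint32 encoding are assumptions drawn from the struct definitions above, which the diff itself does not spell out.

// Hypothetical stand-alone reader sketch -- not part of this commit.
// Assumes the file starts with the gguf_header fields (magic, version,
// n_tensors, n_kv) stored in order as little-endian uint32_t, and that each
// KV pair begins with its key as a length-prefixed string, matching the
// struct definitions above.
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

static int read_u32(FILE * f, uint32_t * dst) {
    return fread(dst, sizeof(*dst), 1, f) == 1;
}

// Read a gguf_string-style value: uint32_t length followed by the raw bytes.
static char * read_str(FILE * f) {
    uint32_t n;
    if (!read_u32(f, &n)) return NULL;
    char * data = malloc(n + 1);
    if (!data) return NULL;
    if (fread(data, 1, n, f) != n) { free(data); return NULL; }
    data[n] = '\0';
    return data;
}

int main(int argc, char ** argv) {
    if (argc < 2) return 1;
    FILE * f = fopen(argv[1], "rb");
    if (!f) return 1;

    uint32_t magic, version, n_tensors, n_kv;
    if (read_u32(f, &magic) && read_u32(f, &version) &&
        read_u32(f, &n_tensors) && read_u32(f, &n_kv)) {
        printf("magic=0x%08x version=%u n_tensors=%u n_kv=%u\n",
               magic, version, n_tensors, n_kv);

        // the first KV entry starts with its key
        char * key = n_kv > 0 ? read_str(f) : NULL;
        if (key) {
            printf("first key: %s\n", key);
            free(key);
        }
    }

    fclose(f);
    return 0;
}

Reading field by field avoids any assumption about struct padding, which is why the sketch does not fread a whole gguf_header at once (the real struct also embeds a pointer, which is never serialized).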
ggml.h (55 lines changed)
@@ -204,6 +204,7 @@
 #define GGML_MAX_NAME 48
 #define GGML_MAX_OP_PARAMS 32
 #define GGML_DEFAULT_N_THREADS 4
+#define GGUF_DEFAULT_ALIGNMENT 32

 #define GGML_EXIT_SUCCESS 0
 #define GGML_EXIT_ABORTED 1
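The new GGUF_DEFAULT_ALIGNMENT constant pairs with the offset field of gguf_tensor_info above, which must be a multiple of the alignment. For illustration only (this helper is not part of the commit), rounding an offset up to the next aligned boundary can be done with the usual power-of-two trick:

// Illustrative helper, not from the commit.
#include <stdint.h>
#include <stdio.h>

#define GGUF_DEFAULT_ALIGNMENT 32

// Round `offset` up to the next multiple of `align`.
// Assumes `align` is a power of two, which holds for the default of 32.
static uint64_t gguf_pad_offset(uint64_t offset, uint64_t align) {
    return (offset + align - 1) & ~(align - 1);
}

int main(void) {
    // e.g. data ending at byte 100 -> the next tensor starts at byte 128
    printf("%llu\n", (unsigned long long) gguf_pad_offset(100, GGUF_DEFAULT_ALIGNMENT));
    return 0;
}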
@@ -1617,23 +1618,47 @@ extern "C" {
     // gguf
     //

-    enum gguf_metadata_value_type {
-        GGUF_METADATA_VALUE_TYPE_UINT8 = 0,
-        GGUF_METADATA_VALUE_TYPE_INT8 = 1,
-        GGUF_METADATA_VALUE_TYPE_UINT16 = 2,
-        GGUF_METADATA_VALUE_TYPE_INT16 = 3,
-        GGUF_METADATA_VALUE_TYPE_UINT32 = 4,
-        GGUF_METADATA_VALUE_TYPE_INT32 = 5,
-        GGUF_METADATA_VALUE_TYPE_FLOAT32 = 6,
-        GGUF_METADATA_VALUE_TYPE_BOOL = 7,
-        GGUF_METADATA_VALUE_TYPE_STRING = 8,
-        GGUF_METADATA_VALUE_TYPE_ARRAY = 9,
+    enum gguf_type {
+        GGUF_TYPE_UINT8 = 0,
+        GGUF_TYPE_INT8 = 1,
+        GGUF_TYPE_UINT16 = 2,
+        GGUF_TYPE_INT16 = 3,
+        GGUF_TYPE_UINT32 = 4,
+        GGUF_TYPE_INT32 = 5,
+        GGUF_TYPE_FLOAT32 = 6,
+        GGUF_TYPE_BOOL = 7,
+        GGUF_TYPE_STRING = 8,
+        GGUF_TYPE_ARRAY = 9,
     };

-    struct gguf_string {
-        uint32_t n;
-        char * data;
-    };
+    struct gguf_context;
+
+    GGML_API struct gguf_context * gguf_gguf_init(const char * path);
+    GGML_API void gguf_gguf_free(struct gguf_context * ctx);
+
+    GGML_API int    gguf_get_version    (struct gguf_context * ctx);
+    GGML_API size_t gguf_get_alignment  (struct gguf_context * ctx);
+    GGML_API size_t gguf_get_data_offset(struct gguf_context * ctx);
+
+    GGML_API int            gguf_get_n_kv(struct gguf_context * ctx);
+    GGML_API const char *   gguf_get_key (struct gguf_context * ctx, int i);
+    GGML_API enum gguf_type gguf_get_type(struct gguf_context * ctx, int i);
+    GGML_API void           gguf_get_val (struct gguf_context * ctx, int i, void * val);
+
+    GGML_API uint8_t      gguf_get_val_u8  (struct gguf_context * ctx, int i);
+    GGML_API int8_t       gguf_get_val_i8  (struct gguf_context * ctx, int i);
+    GGML_API uint16_t     gguf_get_val_u16 (struct gguf_context * ctx, int i);
+    GGML_API int16_t      gguf_get_val_i16 (struct gguf_context * ctx, int i);
+    GGML_API uint32_t     gguf_get_val_u32 (struct gguf_context * ctx, int i);
+    GGML_API int32_t      gguf_get_val_i32 (struct gguf_context * ctx, int i);
+    GGML_API float        gguf_get_val_f32 (struct gguf_context * ctx, int i);
+    GGML_API bool         gguf_get_val_bool(struct gguf_context * ctx, int i);
+    GGML_API const char * gguf_get_val_str (struct gguf_context * ctx, int i);
+    // TODO: arr
+
+    GGML_API int    gguf_get_n_tensors    (struct gguf_context * ctx);
+    GGML_API size_t gguf_get_tensor_offset(struct gguf_context * ctx, int i);

     //
     // system info
     //
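Together these declarations form a read-only inspection API: open a file, query the metadata key-value pairs by index and type, and look up tensor offsets. Below is a hypothetical usage sketch written against the API exactly as declared here; that gguf_gguf_init returns NULL on failure is an assumption the diff does not state.

// Usage sketch for the API declared above -- illustrative only.
// Assumes ggml.h exposes the gguf_* declarations from this commit and that
// gguf_gguf_init returns NULL on failure (not stated in the diff).
#include <stdio.h>
#include "ggml.h"

int main(int argc, char ** argv) {
    if (argc < 2) {
        fprintf(stderr, "usage: %s model.gguf\n", argv[0]);
        return 1;
    }

    struct gguf_context * ctx = gguf_gguf_init(argv[1]);
    if (!ctx) {
        fprintf(stderr, "failed to load %s\n", argv[1]);
        return 1;
    }

    printf("version  : %d\n",  gguf_get_version(ctx));
    printf("alignment: %zu\n", gguf_get_alignment(ctx));
    printf("data off.: %zu\n", gguf_get_data_offset(ctx));

    const int n_kv = gguf_get_n_kv(ctx);
    for (int i = 0; i < n_kv; ++i) {
        const char * key = gguf_get_key(ctx, i);
        enum gguf_type type = gguf_get_type(ctx, i);
        switch (type) {
            case GGUF_TYPE_UINT32:  printf("%s = %u\n", key, gguf_get_val_u32(ctx, i)); break;
            case GGUF_TYPE_FLOAT32: printf("%s = %f\n", key, gguf_get_val_f32(ctx, i)); break;
            case GGUF_TYPE_STRING:  printf("%s = %s\n", key, gguf_get_val_str(ctx, i)); break;
            default:                printf("%s = <type %d>\n", key, (int) type); break;
        }
    }

    const int n_tensors = gguf_get_n_tensors(ctx);
    for (int i = 0; i < n_tensors; ++i) {
        printf("tensor %d at offset %zu\n", i, gguf_get_tensor_offset(ctx, i));
    }

    gguf_gguf_free(ctx);
    return 0;
}

The per-type getters mirror the gguf_type enum, so switching on gguf_get_type is the natural way to dispatch; array values are still marked TODO in this commit.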