define default model path once, sync path with readme (#1366)

András Salamon 2023-05-16 16:46:34 +01:00 committed by GitHub
parent 2a5ee023ad
commit 9560655409
5 changed files with 1 addition and 5 deletions

View File

@@ -45,7 +45,7 @@ struct gpt_params {
     float mirostat_tau = 5.00f; // target entropy
     float mirostat_eta = 0.10f; // learning rate
-    std::string model = "models/lamma-7B/ggml-model.bin"; // model path
+    std::string model = "models/7B/ggml-model.bin"; // model path
     std::string prompt = "";
     std::string path_prompt_cache = ""; // path to file for saving/loading prompt eval state
     std::string input_prefix = ""; // string to prefix user inputs with
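
A minimal sketch of the pattern the examples converge on after this change (illustrative only, not part of the patch): the default model path now comes solely from the gpt_params initializer above, and gpt_params_parse can still override it from the command line (e.g. via -m/--model).

// illustrative example, not part of this commit
#include "common.h"
#include <cstdio>

int main(int argc, char ** argv) {
    gpt_params params; // params.model already defaults to "models/7B/ggml-model.bin"

    // command-line flags (e.g. -m/--model) may still override the default
    if (gpt_params_parse(argc, argv, params) == false) {
        return 1;
    }

    fprintf(stderr, "model path: %s\n", params.model.c_str());
    return 0;
}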

View File

@@ -6,7 +6,6 @@
 int main(int argc, char ** argv) {
     gpt_params params;
-    params.model = "models/llama-7B/ggml-model.bin";
     if (gpt_params_parse(argc, argv, params) == false) {
         return 1;

View File

@@ -50,7 +50,6 @@ void sigint_handler(int signo) {
 int main(int argc, char ** argv) {
     gpt_params params;
-    params.model = "models/llama-7B/ggml-model.bin";
     if (gpt_params_parse(argc, argv, params) == false) {
         return 1;

View File

@@ -116,7 +116,6 @@ void perplexity(llama_context * ctx, const gpt_params & params) {
 int main(int argc, char ** argv) {
     gpt_params params;
-    params.model = "models/llama-7B/ggml-model.bin";
     params.n_batch = 512;
     if (gpt_params_parse(argc, argv, params) == false) {

View File

@@ -8,7 +8,6 @@
 int main(int argc, char ** argv) {
     gpt_params params;
-    params.model = "models/llama-7B/ggml-model.bin";
     params.seed = 42;
     params.n_threads = 4;
     params.repeat_last_n = 64;
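
With these removals, the default model path appears in exactly one place, the gpt_params initializer in the first hunk, and it now matches the "models/7B/ggml-model.bin" path documented in the README, per the commit title. Examples that need other non-default settings (n_batch, seed, n_threads, repeat_last_n) keep those overrides but no longer shadow the model path.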