From d7def1a7524f712e5ebb7cd02bab0f13aa56a7f9 Mon Sep 17 00:00:00 2001 From: Ronsor Date: Sat, 18 Mar 2023 17:10:47 -0700 Subject: [PATCH] Warn user if a context size greater than 2048 tokens is specified (#274) LLaMA doesn't support more than 2048 token context sizes, and going above that produces terrible results. --- main.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/main.cpp b/main.cpp index c88405b82..105dd91ee 100644 --- a/main.cpp +++ b/main.cpp @@ -792,6 +792,11 @@ int main(int argc, char ** argv) { if (gpt_params_parse(argc, argv, params) == false) { return 1; } + + if (params.n_ctx > 2048) { + fprintf(stderr, "%s: warning: model does not support context sizes greater than 2048 tokens (%d specified); " + "expect poor results\n", __func__, params.n_ctx); + } if (params.seed < 0) { params.seed = time(NULL);