From 052027d41dff8f82b654a44290dd224fd9f6b7a0 Mon Sep 17 00:00:00 2001
From: Ronsor
Date: Sat, 18 Mar 2023 16:14:49 -0700
Subject: [PATCH] Warn user if a context size greater than 2048 is specified

LLaMA doesn't support more than 2048 token context sizes, and going above
that produces terrible results.
---
 main.cpp | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/main.cpp b/main.cpp
index c88405b82956a..105dd91ee6065 100644
--- a/main.cpp
+++ b/main.cpp
@@ -792,6 +792,11 @@ int main(int argc, char ** argv) {
     if (gpt_params_parse(argc, argv, params) == false) {
         return 1;
     }
+
+    if (params.n_ctx > 2048) {
+        fprintf(stderr, "%s: warning: model does not support context sizes greater than 2048 tokens (%d specified); "
+                "expect poor results\n", __func__, params.n_ctx);
+    }
 
     if (params.seed < 0) {
         params.seed = time(NULL);