diff options
author | Georgi Gerganov <ggerganov@gmail.com> | 2023-03-25 21:51:41 +0200 |
---|---|---|
committer | Georgi Gerganov <ggerganov@gmail.com> | 2023-03-25 21:51:41 +0200 |
commit | 79b2b266db6b198b5af450982c3cd61120fac951 (patch) | |
tree | 81859f0020c4ba4d55bfc2307cf7c9a8cd75595c /examples/main/main.cpp | |
parent | e2d490dafd860eaaaf9aa8008ab790527d556daf (diff) |
If n_predict == -1, generate forever
Diffstat (limited to 'examples/main/main.cpp')
-rw-r--r-- | examples/main/main.cpp | 3 |
1 file changed, 1 insertion, 2 deletions
diff --git a/examples/main/main.cpp b/examples/main/main.cpp index f78936d..a453743 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -199,7 +199,6 @@ int main(int argc, char ** argv) { } params.n_keep = std::min(params.n_keep, (int) embd_inp.size()); - //params.n_predict = std::min(params.n_predict, n_ctx - (int) embd_inp.size()); // prefix & suffix for instruct mode const auto inp_pfx = ::llama_tokenize(ctx, "\n\n### Instruction:\n\n", true); @@ -293,7 +292,7 @@ int main(int argc, char ** argv) { std::vector<llama_token> embd; - while (n_remain > 0 || params.interactive) { + while (n_remain != 0 || params.interactive) { // predict if (embd.size() > 0) { // infinite text generation via context swapping |