Diffstat (limited to 'examples')
-rw-r--r--  examples/common.cpp      46
-rw-r--r--  examples/main/README.md   9
2 files changed, 32 insertions, 23 deletions
diff --git a/examples/common.cpp b/examples/common.cpp
index 1a2f474..cd63000 100644
--- a/examples/common.cpp
+++ b/examples/common.cpp
@@ -66,35 +66,33 @@ int32_t get_num_physical_cores() {
return n_threads > 0 ? (n_threads <= 4 ? n_threads : n_threads / 2) : 4;
}
-std::string process_escapes(const char* input) {
- std::string output;
+void process_escapes(std::string& input) {
+ std::size_t input_len = input.length();
+ std::size_t output_idx = 0;
- if (input != nullptr) {
- std::size_t input_len = std::strlen(input);
- output.reserve(input_len);
-
- for (std::size_t i = 0; i < input_len; ++i) {
- if (input[i] == '\\' && i + 1 < input_len) {
- switch (input[++i]) {
- case 'n': output.push_back('\n'); break;
- case 't': output.push_back('\t'); break;
- case '\'': output.push_back('\''); break;
- case '\"': output.push_back('\"'); break;
- case '\\': output.push_back('\\'); break;
- default: output.push_back('\\');
- output.push_back(input[i]); break;
- }
- } else {
- output.push_back(input[i]);
+ for (std::size_t input_idx = 0; input_idx < input_len; ++input_idx) {
+ if (input[input_idx] == '\\' && input_idx + 1 < input_len) {
+ switch (input[++input_idx]) {
+ case 'n': input[output_idx++] = '\n'; break;
+ case 'r': input[output_idx++] = '\r'; break;
+ case 't': input[output_idx++] = '\t'; break;
+ case '\'': input[output_idx++] = '\''; break;
+ case '\"': input[output_idx++] = '\"'; break;
+ case '\\': input[output_idx++] = '\\'; break;
+ default: input[output_idx++] = '\\';
+ input[output_idx++] = input[input_idx]; break;
}
+ } else {
+ input[output_idx++] = input[input_idx];
}
}
- return output;
+ input.resize(output_idx);
}
bool gpt_params_parse(int argc, char ** argv, gpt_params & params) {
bool invalid_param = false;
+ bool escape_prompt = false;
std::string arg;
gpt_params default_params;
@@ -118,7 +116,9 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) {
invalid_param = true;
break;
}
- params.prompt = process_escapes(argv[i]);
+ params.prompt = argv[i];
+ } else if (arg == "-e") {
+ escape_prompt = true;
} else if (arg == "--session") {
if (++i >= argc) {
invalid_param = true;
@@ -335,6 +335,9 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) {
gpt_print_usage(argc, argv, default_params);
exit(1);
}
+ if (escape_prompt) {
+ process_escapes(params.prompt);
+ }
return true;
}
@@ -355,6 +358,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) {
fprintf(stderr, " -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads);
fprintf(stderr, " -p PROMPT, --prompt PROMPT\n");
fprintf(stderr, " prompt to start generation with (default: empty)\n");
+ fprintf(stderr, " -e process prompt escapes sequences (\\n, \\r, \\t, \\', \\\", \\\\)\n");
fprintf(stderr, " --session FNAME file to cache model state in (may be large!) (default: none)\n");
fprintf(stderr, " --random-prompt start with a randomized prompt.\n");
fprintf(stderr, " --in-prefix STRING string to prefix user inputs with (default: empty)\n");
diff --git a/examples/main/README.md b/examples/main/README.md
index 493a8c0..6b7facb 100644
--- a/examples/main/README.md
+++ b/examples/main/README.md
@@ -34,13 +34,18 @@ For an interactive experience, try this command:
#### Unix-based systems (Linux, macOS, etc.):
```bash
-./main -m models/7B/ggml-model.bin -n -1 --color -r "User:" --in-prefix " " --prompt 'User: Hi\nAI: Hello. I am an AI chatbot. Would you like to talk?\nUser: Sure!\nAI: What would you like to talk about?\nUser:'
+./main -m models/7B/ggml-model.bin -n -1 --color -r "User:" --in-prefix " " -p \
+'User: Hi
+AI: Hello. I am an AI chatbot. Would you like to talk?
+User: Sure!
+AI: What would you like to talk about?
+User:'
```
#### Windows:
```powershell
-main.exe -m models\7B\ggml-model.bin -n -1 --color -r "User:" --in-prefix " " --prompt "User: Hi\nAI: Hello. I am an AI chatbot. Would you like to talk?\nUser: Sure!\nAI: What would you like to talk about?\nUser:"
+main.exe -m models\7B\ggml-model.bin -n -1 --color -r "User:" --in-prefix " " -e --prompt "User: Hi\nAI: Hello. I am an AI chatbot. Would you like to talk?\nUser: Sure!\nAI: What would you like to talk about?\nUser:"
```
The following command generates "infinite" text from a starting prompt (you can use `Ctrl-C` to stop it):