diff options
author | slaren <2141330+slaren@users.noreply.github.com> | 2023-04-17 17:28:55 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-04-17 17:28:55 +0200 |
commit | 315a95a4d30db726fb7d244dd3b9e90a83fb1616 (patch) | |
tree | 569d8140cde36ad971d3d3120556ab5533603931 /examples/common.h | |
parent | efd05648c88a0923a55f56e7ce1b0f9c33410afb (diff) |
Add LoRA support (#820)
Diffstat (limited to 'examples/common.h')
-rw-r--r-- | examples/common.h | 7 |
1 file changed, 4 insertions, 3 deletions
diff --git a/examples/common.h b/examples/common.h
index 1ea6f74..cbbc2df 100644
--- a/examples/common.h
+++ b/examples/common.h
@@ -31,11 +31,12 @@ struct gpt_params {
     std::string model = "models/lamma-7B/ggml-model.bin"; // model path
     std::string prompt = "";
-    std::string input_prefix = ""; // string to prefix user inputs with
-
-
+    std::string input_prefix = ""; // string to prefix user inputs with
     std::vector<std::string> antiprompt; // string upon seeing which more user input is prompted

+    std::string lora_adapter = ""; // lora adapter path
+    std::string lora_base = ""; // base model path for the lora adapter
+
     bool memory_f16 = true; // use f16 instead of f32 for memory kv
     bool random_prompt = false; // do not randomize prompt if none provided
     bool use_color = false; // use color to distinguish generations and inputs