mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-20 18:45:00 +00:00
Reset auto detected template
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
parent
b087a44fa0
commit
73cb2f8fa5
1 changed file with 4 additions and 0 deletions
|
@@ -3587,6 +3587,10 @@ static void start_llama_server(server_context& ctx_server) {
|
|||
common_chat_templates_source(ctx_server.chat_templates.get()),
|
||||
common_chat_format_example(ctx_server.chat_templates.get(), ctx_server.params_base.use_jinja).c_str());
|
||||
|
||||
// Reset the chat templates
|
||||
// TODO: We should make this configurable by respecting the option that is already present in LocalAI for vLLM
|
||||
ctx_server.chat_templates.reset();
|
||||
|
||||
ctx_server.queue_tasks.on_new_task([&ctx_server](server_task && task) {
|
||||
ctx_server.process_single_task(std::move(task));
|
||||
});
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue