Mirror of https://github.com/mudler/LocalAI.git (synced 2025-05-25 13:04:59 +00:00)
models(gallery): add new models to the gallery (#2124)
* models: add reranker and parler-tts-mini
* fix: chatml im_end should not have a newline
* models(noromaid): add
* models(llama3): add 70b, add dolphin2.9
* models(llama3): add unholy-8b
* models(llama3): add therapyllama3, aura

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Commit 48d0aa2f6d (parent b664edde29)
10 changed files with 272 additions and 34 deletions
@@ -24,8 +24,7 @@ const chatML = `<|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq
 </tool_call>
 {{- else if eq .RoleName "tool" }}
 </tool_response>
-{{- end }}
-<|im_end|>`
+{{- end }}<|im_end|>`
 
 const llama3 = `<|start_header_id|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "tool"}}tool{{else if eq .RoleName "user"}}user{{end}}<|end_header_id|>
 
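The one-line change above relies on Go's text/template whitespace trimming: `{{- end }}` strips the newline that precedes it, so moving `<|im_end|>` onto the same line removes the trailing newline before the stop token. Below is a minimal, self-contained sketch of that behavior; it uses a trimmed-down template (`chatMLMini`), not the full chatML constant from the diff.

```go
// Minimal sketch (assumed, simplified template — not the full chatML constant
// from the diff) showing the whitespace-trimming rule the fix relies on:
// "{{- end }}" eats the newline before it, so keeping "<|im_end|>" on the
// same line means no newline is emitted before the stop token.
package main

import (
	"os"
	"text/template"
)

type msg struct {
	RoleName string
	Content  string
}

const chatMLMini = `<|im_start|>{{if eq .RoleName "assistant"}}assistant{{else}}user{{end}}
{{- if .Content}}
{{.Content}}
{{- end }}<|im_end|>`

func main() {
	tmpl := template.Must(template.New("chatml").Parse(chatMLMini))
	// Prints: <|im_start|>user\nHello<|im_end|>  (no newline before <|im_end|>)
	_ = tmpl.Execute(os.Stdout, msg{RoleName: "user", Content: "Hello"})
}
```

With the previous template, the literal newline between `{{- end }}` and `<|im_end|>` survived rendering, which is why each expected string in the test hunks below also drops its `\n` before `<|im_end|>`.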
@@ -107,7 +106,7 @@ var llama3TestMatch map[string]map[string]interface{} = map[string]map[string]in
 var chatMLTestMatch map[string]map[string]interface{} = map[string]map[string]interface{}{
 	"user": {
 		"template": chatML,
-		"expected": "<|im_start|>user\nA long time ago in a galaxy far, far away...\n<|im_end|>",
+		"expected": "<|im_start|>user\nA long time ago in a galaxy far, far away...<|im_end|>",
 		"data": model.ChatMessageTemplateData{
 			SystemPrompt: "",
 			Role:         "user",
@@ -122,7 +121,7 @@ var chatMLTestMatch map[string]map[string]interface{} = map[string]map[string]in
 	},
 	"assistant": {
 		"template": chatML,
-		"expected": "<|im_start|>assistant\nA long time ago in a galaxy far, far away...\n<|im_end|>",
+		"expected": "<|im_start|>assistant\nA long time ago in a galaxy far, far away...<|im_end|>",
 		"data": model.ChatMessageTemplateData{
 			SystemPrompt: "",
 			Role:         "assistant",
@@ -137,7 +136,7 @@ var chatMLTestMatch map[string]map[string]interface{} = map[string]map[string]in
 	},
 	"function_call": {
 		"template": chatML,
-		"expected": "<|im_start|>assistant\n<tool_call>\n{\"function\":\"test\"}\n</tool_call>\n<|im_end|>",
+		"expected": "<|im_start|>assistant\n<tool_call>\n{\"function\":\"test\"}\n</tool_call><|im_end|>",
 		"data": model.ChatMessageTemplateData{
 			SystemPrompt: "",
 			Role:         "assistant",
@@ -152,7 +151,7 @@ var chatMLTestMatch map[string]map[string]interface{} = map[string]map[string]in
 	},
 	"function_response": {
 		"template": chatML,
-		"expected": "<|im_start|>tool\n<tool_response>\nResponse from tool\n</tool_response>\n<|im_end|>",
+		"expected": "<|im_start|>tool\n<tool_response>\nResponse from tool\n</tool_response><|im_end|>",
 		"data": model.ChatMessageTemplateData{
 			SystemPrompt: "",
 			Role:         "tool",
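To see how the updated expectations line up with the template, here is a hedged, table-driven sketch in the spirit of chatMLTestMatch. The `templateData` struct is a stand-in with only the fields needed for these two cases; it is not LocalAI's actual `model.ChatMessageTemplateData`, and `chatMLMini` is the same trimmed-down template used in the sketch above.

```go
// Hedged sketch of a table-driven check in the spirit of chatMLTestMatch.
// templateData is a stand-in type, NOT LocalAI's model.ChatMessageTemplateData.
package main

import (
	"bytes"
	"fmt"
	"text/template"
)

type templateData struct {
	RoleName string
	Content  string
}

const chatMLMini = `<|im_start|>{{if eq .RoleName "assistant"}}assistant{{else}}user{{end}}
{{- if .Content}}
{{.Content}}
{{- end }}<|im_end|>`

func main() {
	cases := map[string]struct {
		data     templateData
		expected string
	}{
		"user": {
			data:     templateData{RoleName: "user", Content: "A long time ago in a galaxy far, far away..."},
			expected: "<|im_start|>user\nA long time ago in a galaxy far, far away...<|im_end|>",
		},
		"assistant": {
			data:     templateData{RoleName: "assistant", Content: "A long time ago in a galaxy far, far away..."},
			expected: "<|im_start|>assistant\nA long time ago in a galaxy far, far away...<|im_end|>",
		},
	}

	tmpl := template.Must(template.New("chatml").Parse(chatMLMini))
	for name, tc := range cases {
		var buf bytes.Buffer
		if err := tmpl.Execute(&buf, tc.data); err != nil {
			fmt.Printf("%s: render error: %v\n", name, err)
			continue
		}
		// Both cases should report true: no "\n" is rendered before <|im_end|>.
		fmt.Printf("%s matches expected: %v\n", name, buf.String() == tc.expected)
	}
}
```

Both cases render with no newline before `<|im_end|>`, matching the updated expected strings in the diff.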