chore(model gallery): add localai-functioncall-qwen2.5-7b-v0.5 (#4796)
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
parent 3cddf24747
commit 6be8c0c618
2 changed files with 65 additions and 0 deletions
gallery/index.yaml
@@ -3673,6 +3673,22 @@
     - filename: SubtleOne_Qwen2.5-32b-Erudite-Writer-Q4_K_M.gguf
       sha256: fb059c88be4d7d579f0776cead4ca44cf7423b834c5502ce67ef41b15cd0973b
       uri: huggingface://bartowski/SubtleOne_Qwen2.5-32b-Erudite-Writer-GGUF/SubtleOne_Qwen2.5-32b-Erudite-Writer-Q4_K_M.gguf
+- !!merge <<: *qwen25
+  name: "localai-functioncall-qwen2.5-7b-v0.5"
+  url: "github:mudler/LocalAI/gallery/qwen-fcall.yaml@master"
+  icon: https://cdn-uploads.huggingface.co/production/uploads/647374aa7ff32a81ac6d35d4/Dzbdzn27KEc3K6zNNi070.png
+  urls:
+    - https://huggingface.co/mudler/LocalAI-functioncall-qwen2.5-7b-v0.5
+    - https://huggingface.co/mudler/LocalAI-functioncall-qwen2.5-7b-v0.5-Q4_K_M-GGUF
+  description: |
+    A model tailored to be conversational and execute function calls with LocalAI. This model is based on qwen2.5 (7B).
+  overrides:
+    parameters:
+      model: localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
+  files:
+    - filename: localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
+      sha256: 4e7b7fe1d54b881f1ef90799219dc6cc285d29db24f559c8998d1addb35713d4
+      uri: huggingface://mudler/LocalAI-functioncall-qwen2.5-7b-v0.5-Q4_K_M-GGUF/localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
 - &llama31
   url: "github:mudler/LocalAI/gallery/llama3.1-instruct.yaml@master" ## LLama3.1
   icon: https://avatars.githubusercontent.com/u/153379578
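For context, this is roughly how the gallery entry above is meant to be exercised once the model is installed: a minimal sketch in Go against LocalAI's OpenAI-compatible chat completions endpoint. It assumes a LocalAI instance on localhost:8080 (the default port) with the model already pulled from the gallery; the get_weather tool, its parameters, and the prompt are purely illustrative, not part of this commit.

package main

// Illustrative only: send a chat request with a tool definition to a running
// LocalAI instance and print the raw response. A successful function call
// appears under choices[0].message.tool_calls in the OpenAI-style reply.

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Hypothetical request body: model name is the one added by this commit,
	// the get_weather tool is a made-up example.
	body := []byte(`{
	  "model": "localai-functioncall-qwen2.5-7b-v0.5",
	  "messages": [{"role": "user", "content": "What is the weather in Rome?"}],
	  "tools": [{
	    "type": "function",
	    "function": {
	      "name": "get_weather",
	      "description": "Get the current weather for a city",
	      "parameters": {
	        "type": "object",
	        "properties": {"city": {"type": "string"}},
	        "required": ["city"]
	      }
	    }
	  }]
	}`)

	resp, err := http.Post("http://localhost:8080/v1/chat/completions",
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(string(out))
}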
gallery/qwen-fcall.yaml (new file)
@@ -0,0 +1,49 @@
+---
+name: "qwen-fcall"
+
+config_file: |
+  mmap: true
+  function:
+    json_regex_match:
+    - "(?s)<Output>(.*?)</Output>"
+    capture_llm_results:
+    - (?s)<Thought>(.*?)</Thought>
+    replace_llm_results:
+    - key: (?s)<Thought>(.*?)</Thought>
+      value: ""
+    grammar:
+      properties_order: "name,arguments"
+  template:
+    chat_message: |
+      <|im_start|>{{ .RoleName }}
+      {{ if .FunctionCall -}}
+      Function call:
+      {{ else if eq .RoleName "tool" -}}
+      Function response:
+      {{ end -}}
+      {{ if .Content -}}
+      {{.Content }}
+      {{ end -}}
+      {{ if .FunctionCall -}}
+      {{toJson .FunctionCall}}
+      {{ end -}}<|im_end|>
+    function: |
+      <|im_start|>system
+      You are an AI assistant that executes function calls, and these are the tools at your disposal:
+      {{range .Functions}}
+      {'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
+      {{end}}
+      <|im_end|>
+      {{.Input -}}
+      <|im_start|>assistant
+    chat: |
+      {{.Input -}}
+      <|im_start|>assistant
+    completion: |
+      {{.Input}}
+  context_size: 4096
+  f16: true
+  stopwords:
+  - '<|im_end|>'
+  - '<dummy32000>'
+  - '</s>'
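To make the regex settings in the new template concrete: the model is expected to emit its reasoning inside a <Thought> block and the function-call JSON inside an <Output> block; json_regex_match pulls the JSON out, while replace_llm_results strips the thought block from the text returned to the client. The sketch below is an illustration of what those two settings express, not LocalAI's actual implementation, and the raw completion string is a made-up example.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Hypothetical raw completion following the format the template asks for.
	raw := "<Thought>The user wants the weather, so I should call get_weather.</Thought>" +
		"<Output>{\"name\": \"get_weather\", \"arguments\": {\"city\": \"Rome\"}}</Output>"

	// json_regex_match: extract the function-call JSON from the <Output> block.
	output := regexp.MustCompile(`(?s)<Output>(.*?)</Output>`)
	if m := output.FindStringSubmatch(raw); m != nil {
		fmt.Println("function call JSON:", m[1])
	}

	// replace_llm_results: drop the <Thought> block from the visible content.
	thought := regexp.MustCompile(`(?s)<Thought>(.*?)</Thought>`)
	fmt.Println("cleaned content:", thought.ReplaceAllString(raw, ""))
}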