From 6be8c0c618e74b6731531531c22d21fa5c550cf6c26 Mon Sep 17 00:00:00 2001
From: Ettore Di Giacinto <mudler@users.noreply.github.com>
Date: Mon, 10 Feb 2025 12:07:35 +0100
Subject: [PATCH] chore(model gallery): add
 localai-functioncall-qwen2.5-7b-v0.5 (#4796)

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
---
 gallery/index.yaml      | 16 ++++++++++++++
 gallery/qwen-fcall.yaml | 49 +++++++++++++++++++++++++++++++++++++++++
 2 files changed, 65 insertions(+)
 create mode 100644 gallery/qwen-fcall.yaml

diff --git a/gallery/index.yaml b/gallery/index.yaml
index 9cb7e4c4..bc837f92 100644
--- a/gallery/index.yaml
+++ b/gallery/index.yaml
@@ -3673,6 +3673,22 @@
     - filename: SubtleOne_Qwen2.5-32b-Erudite-Writer-Q4_K_M.gguf
       sha256: fb059c88be4d7d579f0776cead4ca44cf7423b834c5502ce67ef41b15cd0973b
       uri: huggingface://bartowski/SubtleOne_Qwen2.5-32b-Erudite-Writer-GGUF/SubtleOne_Qwen2.5-32b-Erudite-Writer-Q4_K_M.gguf
+- !!merge <<: *qwen25
+  name: "localai-functioncall-qwen2.5-7b-v0.5"
+  url: "github:mudler/LocalAI/gallery/qwen-fcall.yaml@master"
+  icon: https://cdn-uploads.huggingface.co/production/uploads/647374aa7ff32a81ac6d35d4/Dzbdzn27KEc3K6zNNi070.png
+  urls:
+    - https://huggingface.co/mudler/LocalAI-functioncall-qwen2.5-7b-v0.5
+    - https://huggingface.co/mudler/LocalAI-functioncall-qwen2.5-7b-v0.5-Q4_K_M-GGUF
+  description: |
+    A model tailored to be conversational and execute function calls with LocalAI. This model is based on qwen2.5 (7B). 
+  overrides:
+    parameters:
+      model: localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
+    files:
+      - filename: localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
+        sha256: 4e7b7fe1d54b881f1ef90799219dc6cc285d29db24f559c8998d1addb35713d4
+        uri: huggingface://mudler/LocalAI-functioncall-qwen2.5-7b-v0.5-Q4_K_M-GGUF/localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
 - &llama31
   url: "github:mudler/LocalAI/gallery/llama3.1-instruct.yaml@master" ## LLama3.1
   icon: https://avatars.githubusercontent.com/u/153379578
diff --git a/gallery/qwen-fcall.yaml b/gallery/qwen-fcall.yaml
new file mode 100644
index 00000000..f168c7fe
--- /dev/null
+++ b/gallery/qwen-fcall.yaml
@@ -0,0 +1,49 @@
+---
+name: "qwen-fcall"
+
+config_file: |
+  mmap: true
+  function:
+    json_regex_match:
+      - "(?s)<tool_call>(.*?)</tool_call>"
+    capture_llm_results:
+      - (?s)<tool_call>(.*?)</tool_call>
+    replace_llm_results:
+      - key: (?s)<tool_call>(.*?)</tool_call>
+        value: ""
+    grammar:
+      properties_order: "name,arguments"
+  template:
+    chat_message: |
+      <|im_start|>{{ .RoleName }}
+      {{ if .FunctionCall -}}
+      Function call:
+      {{ else if eq .RoleName "tool" -}}
+      Function response:
+      {{ end -}}
+      {{ if .Content -}}
+      {{.Content }}
+      {{ end -}}
+      {{ if .FunctionCall -}}
+      {{toJson .FunctionCall}}
+      {{ end -}}<|im_end|>
+    function: |
+      <|im_start|>system
+      You are an AI assistant that executes function calls, and these are the tools at your disposal:
+      {{range .Functions}}
+      {'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
+      {{end}}
+      <|im_end|>
+      {{.Input -}}
+      <|im_start|>assistant
+    chat: |
+      {{.Input -}}
+      <|im_start|>assistant
+    completion: |
+      {{.Input}}
+  context_size: 4096
+  f16: true
+  stopwords:
+    - '<|im_end|>'
+    - '<dummy32000>'
+    - '</tool_call>'