models(gallery): add hermes-2-theta-llama-3-70b (#2626)

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Ettore Di Giacinto 2024-06-21 19:41:49 +02:00 committed by GitHub
parent 70a2bfe82e
commit 964732590d
2 changed files with 36 additions and 34 deletions

@@ -7,47 +7,26 @@ config_file: |
stopwords:
- "<|im_end|>"
- "<dummy32000>"
- "</tool_call>"
- "<|eot_id|>"
- "<|end_of_text|>"
function:
# disable injecting the "answer" tool
disable_no_action: true
grammar:
# This allows the grammar to also return messages
mixed_mode: true
return_name_in_function_response: true
# Without grammar uncomment the lines below
# Warning: this is relying only on the capability of the
# LLM model to generate the correct function call.
grammar:
# Uncomment the line below to enable grammar matching for JSON results if the model is breaking
# the output. This will make the model more accurate and won't break the JSON output.
# This however, will make parallel_calls not functional (it is a known bug)
# mixed_mode: true
parallel_calls: true
expect_strings_after_json: true
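# Note: the patterns below pull the JSON payload out of the <tool_call> markup
# emitted by the model; the looser variants act as fallbacks when the closing
# </tool_call> tag is missing from the completion.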
json_regex_match:
- "(?s)<tool_call>(.*?)</tool_call>"
- "(?s)<tool_call>(.*?)"
- "(?s)<tool_call>(.*)"
capture_llm_results:
- (?s)<scratchpad>(.*?)</scratchpad>
replace_llm_results:
# Drop the scratchpad content from responses
- key: "(?s)<scratchpad>.*</scratchpad>"
value: ""
replace_function_results:
# Replace everything that is not JSON array or object
- key: '(?s)^[^{\[]*'
value: ""
- key: '(?s)[^}\]]*$'
value: ""
- key: "'([^']*?)'"
value: "_DQUOTE_${1}_DQUOTE_"
- key: '\\"'
value: "__TEMP_QUOTE__"
- key: "\'"
value: "'"
- key: "_DQUOTE_"
value: '"'
- key: "__TEMP_QUOTE__"
value: '"'
# Drop the scratchpad content from responses
- key: "(?s)<scratchpad>.*</scratchpad>"
value: ""
- key: (?s)<scratchpad>(.*?)</scratchpad>
value: ""
template:
chat: |
@@ -73,7 +52,7 @@ config_file: |
{{- end }}<|im_end|>
completion: |
{{.Input}}
function: |-
function: |
<|im_start|>system
You are a function calling AI model.
Here are the available tools:
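
For reference, the extraction and cleanup steps configured above can be approximated outside LocalAI. The sketch below is illustrative only, not LocalAI's Go implementation: it applies two of the three json_regex_match patterns (the lazy middle variant is omitted for clarity) to pull the tool call out of a raw completion, then runs the replace_function_results substitutions in order, with the Go-style ${1} backreference rewritten to Python's \g<1>. The helper names and the sample completion are made up for the example.

import re

# json_regex_match: pull the JSON payload out of the <tool_call> markup.
TOOL_CALL_PATTERNS = [
    r"(?s)<tool_call>(.*?)</tool_call>",
    r"(?s)<tool_call>(.*)",
]

# replace_function_results: ordered substitutions that strip non-JSON text and
# convert single-quoted pseudo-JSON into valid double-quoted JSON.
REPLACEMENTS = [
    (r"(?s)^[^{\[]*", ""),                      # drop anything before the first { or [
    (r"(?s)[^}\]]*$", ""),                      # drop anything after the last } or ]
    (r"'([^']*?)'", r"_DQUOTE_\g<1>_DQUOTE_"),  # mark single-quoted strings
    (r'\\"', "__TEMP_QUOTE__"),                 # protect escaped double quotes
    (r"\\'", "'"),                              # unescape single quotes
    (r"_DQUOTE_", '"'),                         # markers become real double quotes
    (r"__TEMP_QUOTE__", '"'),                   # restore protected double quotes
]

def extract_tool_call(completion: str) -> str:
    for pattern in TOOL_CALL_PATTERNS:
        match = re.search(pattern, completion)
        if match:
            return match.group(1)
    return completion

def normalize_function_result(raw: str) -> str:
    for pattern, repl in REPLACEMENTS:
        raw = re.sub(pattern, repl, raw)
    return raw

raw = "<tool_call>{'name': 'get_weather', 'arguments': {'city': 'Rome'}}</tool_call>"
print(normalize_function_result(extract_tool_call(raw)))
# {"name": "get_weather", "arguments": {"city": "Rome"}}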