refactor: OpenAI integration (#996)

- Add `OpenAIResponse` to trim `<think>...</think>` blocks from the model output (usage sketch below)
- Add an `Enable Streaming` option so that services which do not support streaming output still work (see the non-streaming sketch after the `OpenAIService` changes)
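
For reviewers, a minimal usage sketch of the new `OpenAIResponse` (the streamed chunks below are invented for illustration; only the `OpenAIResponse(Action<string>)` constructor, `Append`, and `End` come from this change, and a `using SourceGit.Models;` is assumed):

```csharp
// The callback receives only the visible text; complete <think>...</think>
// blocks are stripped before anything is forwarded.
var rsp = new OpenAIResponse(text => System.Console.Write(text));

// A reasoning block may be split across streamed updates; Append buffers a
// suspected tag tail until it can either drop the finished block or emit it.
rsp.Append("<think>internal chain-of-");
rsp.Append("thought</think>The actual ");
rsp.Append("subject line.");

// Flush anything still buffered once the stream is finished.
rsp.End();

// Printed output: "The actual subject line."
```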

Signed-off-by: leo <longshuang@msn.cn>
leo 2025-02-19 18:01:16 +08:00
parent 69d107430a
commit c3eca0d7fd
6 changed files with 145 additions and 85 deletions

View file

@@ -1,7 +1,6 @@
 using System;
 using System.Collections.Generic;
 using System.Text;
-using System.Text.RegularExpressions;
 using System.Threading;
 
 using Avalonia.Threading;
@@ -36,6 +35,8 @@ namespace SourceGit.Commands
         {
             try
             {
+                _onResponse?.Invoke("Waiting for pre-file analyzing to completed...\n\n");
+
                 var responseBuilder = new StringBuilder();
                 var summaryBuilder = new StringBuilder();
                 foreach (var change in _changes)
@@ -49,18 +50,17 @@ namespace SourceGit.Commands
                     var rs = new GetDiffContent(_repo, new Models.DiffOption(change, false)).ReadToEnd();
                     if (rs.IsSuccess)
                     {
-                        var hasFirstValidChar = false;
-                        var thinkingBuffer = new StringBuilder();
                         _service.Chat(
                             _service.AnalyzeDiffPrompt,
                             $"Here is the `git diff` output: {rs.StdOut}",
                             _cancelToken,
                             update =>
-                                ProcessChatResponse(update, ref hasFirstValidChar, thinkingBuffer,
-                                    (responseBuilder, text =>
-                                        _onResponse?.Invoke(
-                                            $"Waiting for pre-file analyzing to completed...\n\n{text}")),
-                                    (summaryBuilder, null)));
+                            {
+                                responseBuilder.Append(update);
+                                summaryBuilder.Append(update);
+
+                                _onResponse?.Invoke($"Waiting for pre-file analyzing to completed...\n\n{responseBuilder}");
+                            });
                     }
 
                     responseBuilder.Append("\n");
@@ -74,15 +74,15 @@ namespace SourceGit.Commands
                 var responseBody = responseBuilder.ToString();
                 var subjectBuilder = new StringBuilder();
-                var hasSubjectFirstValidChar = false;
-                var subjectThinkingBuffer = new StringBuilder();
 
                 _service.Chat(
                     _service.GenerateSubjectPrompt,
                     $"Here are the summaries changes:\n{summaryBuilder}",
                     _cancelToken,
                     update =>
-                        ProcessChatResponse(update, ref hasSubjectFirstValidChar, subjectThinkingBuffer,
-                            (subjectBuilder, text => _onResponse?.Invoke($"{text}\n\n{responseBody}"))));
+                    {
+                        subjectBuilder.Append(update);
+                        _onResponse?.Invoke($"{subjectBuilder}\n\n{responseBody}");
+                    });
             }
             catch (Exception e)
             {
@@ -90,67 +90,10 @@ namespace SourceGit.Commands
             }
         }
 
-        private void ProcessChatResponse(
-            string update,
-            ref bool hasFirstValidChar,
-            StringBuilder thinkingBuffer,
-            params (StringBuilder builder, Action<string> callback)[] outputs)
-        {
-            if (!hasFirstValidChar)
-            {
-                update = update.TrimStart();
-                if (string.IsNullOrEmpty(update))
-                    return;
-
-                if (update.StartsWith("<", StringComparison.Ordinal))
-                    thinkingBuffer.Append(update);
-
-                hasFirstValidChar = true;
-            }
-
-            if (thinkingBuffer.Length > 0)
-                thinkingBuffer.Append(update);
-
-            if (thinkingBuffer.Length > 15)
-            {
-                var match = REG_COT.Match(thinkingBuffer.ToString());
-                if (match.Success)
-                {
-                    update = REG_COT.Replace(thinkingBuffer.ToString(), "").TrimStart();
-                    if (update.Length > 0)
-                    {
-                        foreach (var output in outputs)
-                            output.builder.Append(update);
-
-                        thinkingBuffer.Clear();
-                    }
-
-                    return;
-                }
-
-                match = REG_THINK_START.Match(thinkingBuffer.ToString());
-                if (!match.Success)
-                {
-                    foreach (var output in outputs)
-                        output.builder.Append(thinkingBuffer);
-
-                    thinkingBuffer.Clear();
-                    return;
-                }
-            }
-
-            if (thinkingBuffer.Length == 0)
-            {
-                foreach (var output in outputs)
-                {
-                    output.builder.Append(update);
-                    output.callback?.Invoke(output.builder.ToString());
-                }
-            }
-        }
-
         private Models.OpenAIService _service;
         private string _repo;
         private List<Models.Change> _changes;
         private CancellationToken _cancelToken;
         private Action<string> _onResponse;
-
-        private static readonly Regex REG_COT = new(@"^<(think|thought|thinking|thought_chain)>(.*?)</\1>", RegexOptions.Singleline);
-        private static readonly Regex REG_THINK_START = new(@"^<(think|thought|thinking|thought_chain)>", RegexOptions.Singleline);
     }
 }

View file

@@ -1,5 +1,8 @@
 using System;
 using System.ClientModel;
+using System.Collections.Generic;
+using System.Text;
+using System.Text.RegularExpressions;
 using System.Threading;
 using Azure.AI.OpenAI;
 using CommunityToolkit.Mvvm.ComponentModel;
@@ -8,6 +11,91 @@ using OpenAI.Chat;
 
 namespace SourceGit.Models
 {
+    public partial class OpenAIResponse
+    {
+        public OpenAIResponse(Action<string> onUpdate)
+        {
+            _onUpdate = onUpdate;
+        }
+
+        public void Append(string text)
+        {
+            var buffer = text;
+
+            if (_thinkTail.Length > 0)
+            {
+                _thinkTail.Append(buffer);
+                buffer = _thinkTail.ToString();
+                _thinkTail.Clear();
+            }
+
+            buffer = REG_COT().Replace(buffer, "");
+
+            var startIdx = buffer.IndexOf('<', StringComparison.Ordinal);
+            if (startIdx >= 0)
+            {
+                if (startIdx > 0)
+                    OnReceive(buffer.Substring(0, startIdx));
+
+                var endIdx = buffer.IndexOf(">", startIdx + 1, StringComparison.Ordinal);
+                if (endIdx <= startIdx)
+                {
+                    if (buffer.Length - startIdx <= 15)
+                        _thinkTail.Append(buffer.Substring(startIdx));
+                    else
+                        OnReceive(buffer.Substring(startIdx));
+                }
+                else if (endIdx < startIdx + 15)
+                {
+                    var tag = buffer.Substring(startIdx + 1, endIdx - startIdx - 1);
+                    if (_thinkTags.Contains(tag))
+                        _thinkTail.Append(buffer.Substring(startIdx));
+                    else
+                        OnReceive(buffer.Substring(startIdx));
+                }
+                else
+                {
+                    OnReceive(buffer.Substring(startIdx));
+                }
+            }
+            else
+            {
+                OnReceive(buffer);
+            }
+        }
+
+        public void End()
+        {
+            if (_thinkTail.Length > 0)
+            {
+                OnReceive(_thinkTail.ToString());
+                _thinkTail.Clear();
+            }
+        }
+
+        private void OnReceive(string text)
+        {
+            if (!_hasTrimmedStart)
+            {
+                text = text.TrimStart();
+                if (string.IsNullOrEmpty(text))
+                    return;
+
+                _hasTrimmedStart = true;
+            }
+
+            _onUpdate.Invoke(text);
+        }
+
+        [GeneratedRegex(@"<(think|thought|thinking|thought_chain)>.*?</\1>", RegexOptions.Singleline)]
+        private static partial Regex REG_COT();
+
+        private Action<string> _onUpdate = null;
+        private StringBuilder _thinkTail = new StringBuilder();
+        private HashSet<string> _thinkTags = ["think", "thought", "thinking", "thought_chain"];
+        private bool _hasTrimmedStart = false;
+    }
+
     public class OpenAIService : ObservableObject
     {
         public string Name
@@ -42,6 +130,12 @@ namespace SourceGit.Models
             set => SetProperty(ref _model, value);
         }
 
+        public bool Streaming
+        {
+            get => _streaming;
+            set => SetProperty(ref _streaming, value);
+        }
+
         public string AnalyzeDiffPrompt
         {
             get => _analyzeDiffPrompt;
@@ -89,33 +183,48 @@ namespace SourceGit.Models
         public void Chat(string prompt, string question, CancellationToken cancellation, Action<string> onUpdate)
         {
-            Uri server = new(Server);
-            ApiKeyCredential key = new(ApiKey);
-            ChatClient client = null;
-            if (Server.Contains("openai.azure.com/", StringComparison.Ordinal))
+            var server = new Uri(_server);
+            var key = new ApiKeyCredential(_apiKey);
+            var client = null as ChatClient;
+            if (_server.Contains("openai.azure.com/", StringComparison.Ordinal))
             {
                 var azure = new AzureOpenAIClient(server, key);
-                client = azure.GetChatClient(Model);
+                client = azure.GetChatClient(_model);
             }
             else
             {
                 var openai = new OpenAIClient(key, new() { Endpoint = server });
-                client = openai.GetChatClient(Model);
+                client = openai.GetChatClient(_model);
            }
 
+            var messages = new List<ChatMessage>();
+            messages.Add(_model.Equals("o1-mini", StringComparison.Ordinal) ? new UserChatMessage(prompt) : new SystemChatMessage(prompt));
+            messages.Add(new UserChatMessage(question));
+
             try
             {
-                var updates = client.CompleteChatStreaming([
-                    _model.Equals("o1-mini", StringComparison.Ordinal) ? new UserChatMessage(prompt) : new SystemChatMessage(prompt),
-                    new UserChatMessage(question),
-                ], null, cancellation);
-
-                foreach (var update in updates)
-                {
-                    if (update.ContentUpdate.Count > 0)
-                        onUpdate.Invoke(update.ContentUpdate[0].Text);
-                }
+                var rsp = new OpenAIResponse(onUpdate);
+
+                if (_streaming)
+                {
+                    var updates = client.CompleteChatStreaming(messages, null, cancellation);
+                    foreach (var update in updates)
+                    {
+                        if (update.ContentUpdate.Count > 0)
+                            rsp.Append(update.ContentUpdate[0].Text);
+                    }
+                }
+                else
+                {
+                    var completion = client.CompleteChat(messages, null, cancellation);
+                    if (completion.Value.Content.Count > 0)
+                        rsp.Append(completion.Value.Content[0].Text);
+                }
+
+                rsp.End();
             }
             catch
             {
                 if (!cancellation.IsCancellationRequested)
@@ -127,6 +236,7 @@ namespace SourceGit.Models
         private string _server;
         private string _apiKey;
         private string _model;
+        private bool _streaming = true;
         private string _analyzeDiffPrompt;
         private string _generateSubjectPrompt;
     }
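
A hedged sketch of the new non-streaming path from a caller's point of view (`Chat`'s signature, the `Streaming` property, and the `CompleteChat` fallback are taken from this diff; the configuration values, the `Server`/`ApiKey` setters, and the prompt strings are placeholders and assumptions, not part of the change):

```csharp
using System.Threading;
using SourceGit.Models;

var service = new OpenAIService
{
    Server = "https://my-openai-compatible.example/v1", // placeholder, assumed setter
    ApiKey = "<api-key>",                                // placeholder, assumed setter
    Model = "<model-name>",                              // placeholder
    Streaming = false // use a single CompleteChat call instead of streaming updates
};

using var cts = new CancellationTokenSource();
service.Chat(
    "You summarize git diffs.",               // prompt (placeholder)
    "Here is the `git diff` output: <diff>",  // question (placeholder)
    cts.Token,
    text => System.Console.Write(text));      // already think-trimmed by OpenAIResponse
```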

View file

@@ -459,6 +459,7 @@
     <x:String x:Key="Text.Preferences.AI.Model" xml:space="preserve">Model</x:String>
     <x:String x:Key="Text.Preferences.AI.Name" xml:space="preserve">Name</x:String>
     <x:String x:Key="Text.Preferences.AI.Server" xml:space="preserve">Server</x:String>
+    <x:String x:Key="Text.Preferences.AI.Streaming" xml:space="preserve">Enable Streaming</x:String>
     <x:String x:Key="Text.Preferences.Appearance" xml:space="preserve">APPEARANCE</x:String>
     <x:String x:Key="Text.Preferences.Appearance.DefaultFont" xml:space="preserve">Default Font</x:String>
     <x:String x:Key="Text.Preferences.Appearance.FontSize" xml:space="preserve">Font Size</x:String>

View file

@@ -462,6 +462,7 @@
     <x:String x:Key="Text.Preferences.AI.Model" xml:space="preserve">模型</x:String>
     <x:String x:Key="Text.Preferences.AI.Name" xml:space="preserve">配置名称</x:String>
     <x:String x:Key="Text.Preferences.AI.Server" xml:space="preserve">服务地址</x:String>
+    <x:String x:Key="Text.Preferences.AI.Streaming" xml:space="preserve">启用流式输出</x:String>
     <x:String x:Key="Text.Preferences.Appearance" xml:space="preserve">外观配置</x:String>
     <x:String x:Key="Text.Preferences.Appearance.DefaultFont" xml:space="preserve">缺省字体</x:String>
     <x:String x:Key="Text.Preferences.Appearance.FontSize" xml:space="preserve">字体大小</x:String>

View file

@@ -456,12 +456,13 @@
     <x:String x:Key="Text.Period.YearsAgo" xml:space="preserve">{0} 年前</x:String>
     <x:String x:Key="Text.Preferences" xml:space="preserve">偏好設定</x:String>
     <x:String x:Key="Text.Preferences.AI" xml:space="preserve">AI</x:String>
-    <x:String x:Key="Text.Preferences.AI.Server" xml:space="preserve">伺服器</x:String>
+    <x:String x:Key="Text.Preferences.AI.AnalyzeDiffPrompt" xml:space="preserve">分析變更差異提示詞</x:String>
     <x:String x:Key="Text.Preferences.AI.ApiKey" xml:space="preserve">API 金鑰</x:String>
+    <x:String x:Key="Text.Preferences.AI.GenerateSubjectPrompt" xml:space="preserve">產生提交訊息提示詞</x:String>
     <x:String x:Key="Text.Preferences.AI.Model" xml:space="preserve">模型</x:String>
     <x:String x:Key="Text.Preferences.AI.Name" xml:space="preserve">名稱</x:String>
-    <x:String x:Key="Text.Preferences.AI.AnalyzeDiffPrompt" xml:space="preserve">分析變更差異提示詞</x:String>
-    <x:String x:Key="Text.Preferences.AI.GenerateSubjectPrompt" xml:space="preserve">產生提交訊息提示詞</x:String>
+    <x:String x:Key="Text.Preferences.AI.Server" xml:space="preserve">伺服器</x:String>
+    <x:String x:Key="Text.Preferences.AI.Streaming" xml:space="preserve">啟用串流輸出</x:String>
     <x:String x:Key="Text.Preferences.Appearance" xml:space="preserve">外觀設定</x:String>
     <x:String x:Key="Text.Preferences.Appearance.DefaultFont" xml:space="preserve">預設字型</x:String>
     <x:String x:Key="Text.Preferences.Appearance.FontSize" xml:space="preserve">字型大小</x:String>

View file

@@ -616,6 +616,10 @@
                               Text="{Binding GenerateSubjectPrompt, Mode=TwoWay}"
                               AcceptsReturn="true"
                               TextWrapping="Wrap"/>
+
+                    <CheckBox Margin="0,12,0,0"
+                              Content="{DynamicResource Text.Preferences.AI.Streaming}"
+                              IsChecked="{Binding Streaming, Mode=TwoWay}"/>
                   </StackPanel>
                 </DataTemplate>
               </ContentControl.DataTemplates>