Mirror of https://github.com/sourcegit-scm/sourcegit (synced 2025-05-21 04:04:59 +00:00)
refactor: OpenAI integration (#996)
- Add `OpenAIResponse` to trim the `<think>...</think>` block
- Add an `Enable Streaming` option to fix the issue that some services do not support streaming output

Signed-off-by: leo <longshuang@msn.cn>
parent 69d107430a
commit c3eca0d7fd
6 changed files with 145 additions and 85 deletions
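The idea behind the first change can be pictured with a small standalone C# sketch (illustration only, not part of this diff; the pattern mirrors the REG_COT regex introduced in the Models hunks below, and the sample strings are invented):

    // Hypothetical illustration: drop a complete <think>...</think> block before the
    // model output is shown to the user, which is what the new OpenAIResponse does below.
    using System.Text.RegularExpressions;

    var raw = "<think>hidden reasoning</think>fix: correct typo in README";
    var visible = Regex.Replace(raw,
        @"<(think|thought|thinking|thought_chain)>.*?</\1>",
        "", RegexOptions.Singleline).TrimStart();
    // visible == "fix: correct typo in README"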
@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;

using Avalonia.Threading;
@@ -36,6 +35,8 @@ namespace SourceGit.Commands
        {
            try
            {
                _onResponse?.Invoke("Waiting for pre-file analyzing to completed...\n\n");

                var responseBuilder = new StringBuilder();
                var summaryBuilder = new StringBuilder();
                foreach (var change in _changes)
@@ -49,18 +50,17 @@ namespace SourceGit.Commands
                    var rs = new GetDiffContent(_repo, new Models.DiffOption(change, false)).ReadToEnd();
                    if (rs.IsSuccess)
                    {
                        var hasFirstValidChar = false;
                        var thinkingBuffer = new StringBuilder();
                        _service.Chat(
                            _service.AnalyzeDiffPrompt,
                            $"Here is the `git diff` output: {rs.StdOut}",
                            _cancelToken,
                            update =>
                                ProcessChatResponse(update, ref hasFirstValidChar, thinkingBuffer,
                                    (responseBuilder, text =>
                                        _onResponse?.Invoke(
                                            $"Waiting for pre-file analyzing to completed...\n\n{text}")),
                                    (summaryBuilder, null)));
                            {
                                responseBuilder.Append(update);
                                summaryBuilder.Append(update);

                                _onResponse?.Invoke($"Waiting for pre-file analyzing to completed...\n\n{responseBuilder}");
                            });
                    }

                    responseBuilder.Append("\n");
@@ -74,15 +74,15 @@ namespace SourceGit.Commands

                var responseBody = responseBuilder.ToString();
                var subjectBuilder = new StringBuilder();
                var hasSubjectFirstValidChar = false;
                var subjectThinkingBuffer = new StringBuilder();
                _service.Chat(
                    _service.GenerateSubjectPrompt,
                    $"Here are the summaries changes:\n{summaryBuilder}",
                    _cancelToken,
                    update =>
                        ProcessChatResponse(update, ref hasSubjectFirstValidChar, subjectThinkingBuffer,
                            (subjectBuilder, text => _onResponse?.Invoke($"{text}\n\n{responseBody}"))));
                    {
                        subjectBuilder.Append(update);
                        _onResponse?.Invoke($"{subjectBuilder}\n\n{responseBody}");
                    });
            }
            catch (Exception e)
            {
@@ -90,67 +90,10 @@ namespace SourceGit.Commands
            }
        }

        private void ProcessChatResponse(
            string update,
            ref bool hasFirstValidChar,
            StringBuilder thinkingBuffer,
            params (StringBuilder builder, Action<string> callback)[] outputs)
        {
            if (!hasFirstValidChar)
            {
                update = update.TrimStart();
                if (string.IsNullOrEmpty(update))
                    return;
                if (update.StartsWith("<", StringComparison.Ordinal))
                    thinkingBuffer.Append(update);
                hasFirstValidChar = true;
            }

            if (thinkingBuffer.Length > 0)
                thinkingBuffer.Append(update);

            if (thinkingBuffer.Length > 15)
            {
                var match = REG_COT.Match(thinkingBuffer.ToString());
                if (match.Success)
                {
                    update = REG_COT.Replace(thinkingBuffer.ToString(), "").TrimStart();
                    if (update.Length > 0)
                    {
                        foreach (var output in outputs)
                            output.builder.Append(update);
                        thinkingBuffer.Clear();
                    }
                    return;
                }

                match = REG_THINK_START.Match(thinkingBuffer.ToString());
                if (!match.Success)
                {
                    foreach (var output in outputs)
                        output.builder.Append(thinkingBuffer);
                    thinkingBuffer.Clear();
                    return;
                }
            }

            if (thinkingBuffer.Length == 0)
            {
                foreach (var output in outputs)
                {
                    output.builder.Append(update);
                    output.callback?.Invoke(output.builder.ToString());
                }
            }
        }

        private Models.OpenAIService _service;
        private string _repo;
        private List<Models.Change> _changes;
        private CancellationToken _cancelToken;
        private Action<string> _onResponse;

        private static readonly Regex REG_COT = new(@"^<(think|thought|thinking|thought_chain)>(.*?)</\1>", RegexOptions.Singleline);
        private static readonly Regex REG_THINK_START = new(@"^<(think|thought|thinking|thought_chain)>", RegexOptions.Singleline);
    }
}
@@ -1,5 +1,8 @@
using System;
using System.ClientModel;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using Azure.AI.OpenAI;
using CommunityToolkit.Mvvm.ComponentModel;
@@ -8,6 +11,91 @@ using OpenAI.Chat;

namespace SourceGit.Models
{
    public partial class OpenAIResponse
    {
        public OpenAIResponse(Action<string> onUpdate)
        {
            _onUpdate = onUpdate;
        }

        public void Append(string text)
        {
            var buffer = text;

            if (_thinkTail.Length > 0)
            {
                _thinkTail.Append(buffer);
                buffer = _thinkTail.ToString();
                _thinkTail.Clear();
            }

            buffer = REG_COT().Replace(buffer, "");

            var startIdx = buffer.IndexOf('<', StringComparison.Ordinal);
            if (startIdx >= 0)
            {
                if (startIdx > 0)
                    OnReceive(buffer.Substring(0, startIdx));

                var endIdx = buffer.IndexOf(">", startIdx + 1, StringComparison.Ordinal);
                if (endIdx <= startIdx)
                {
                    if (buffer.Length - startIdx <= 15)
                        _thinkTail.Append(buffer.Substring(startIdx));
                    else
                        OnReceive(buffer.Substring(startIdx));
                }
                else if (endIdx < startIdx + 15)
                {
                    var tag = buffer.Substring(startIdx + 1, endIdx - startIdx - 1);
                    if (_thinkTags.Contains(tag))
                        _thinkTail.Append(buffer.Substring(startIdx));
                    else
                        OnReceive(buffer.Substring(startIdx));
                }
                else
                {
                    OnReceive(buffer.Substring(startIdx));
                }
            }
            else
            {
                OnReceive(buffer);
            }
        }

        public void End()
        {
            if (_thinkTail.Length > 0)
            {
                OnReceive(_thinkTail.ToString());
                _thinkTail.Clear();
            }
        }

        private void OnReceive(string text)
        {
            if (!_hasTrimmedStart)
            {
                text = text.TrimStart();
                if (string.IsNullOrEmpty(text))
                    return;

                _hasTrimmedStart = true;
            }

            _onUpdate.Invoke(text);
        }

        [GeneratedRegex(@"<(think|thought|thinking|thought_chain)>.*?</\1>", RegexOptions.Singleline)]
        private static partial Regex REG_COT();

        private Action<string> _onUpdate = null;
        private StringBuilder _thinkTail = new StringBuilder();
        private HashSet<string> _thinkTags = ["think", "thought", "thinking", "thought_chain"];
        private bool _hasTrimmedStart = false;
    }

    public class OpenAIService : ObservableObject
    {
        public string Name
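A minimal usage sketch of the OpenAIResponse class added above (the chunk boundaries and strings are invented for illustration, and the same usings as in the hunk, e.g. System.Text, are assumed): text belonging to a <think> block never reaches the callback, and a partial tag is parked in _thinkTail until it can be classified.

    var output = new StringBuilder();
    var rsp = new OpenAIResponse(text => output.Append(text));

    rsp.Append("<thi");                          // too short to classify, kept in _thinkTail
    rsp.Append("nk>internal reasoning</think>"); // completed block is removed by REG_COT()
    rsp.Append("feat: add a streaming toggle");  // visible text is forwarded to the callback
    rsp.End();                                   // flushes anything still buffered

    // output now contains only "feat: add a streaming toggle"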
@@ -42,6 +130,12 @@ namespace SourceGit.Models
            set => SetProperty(ref _model, value);
        }

        public bool Streaming
        {
            get => _streaming;
            set => SetProperty(ref _streaming, value);
        }

        public string AnalyzeDiffPrompt
        {
            get => _analyzeDiffPrompt;
@@ -89,32 +183,47 @@ namespace SourceGit.Models

        public void Chat(string prompt, string question, CancellationToken cancellation, Action<string> onUpdate)
        {
            Uri server = new(Server);
            ApiKeyCredential key = new(ApiKey);
            ChatClient client = null;
            if (Server.Contains("openai.azure.com/", StringComparison.Ordinal))
            var server = new Uri(_server);
            var key = new ApiKeyCredential(_apiKey);
            var client = null as ChatClient;
            if (_server.Contains("openai.azure.com/", StringComparison.Ordinal))
            {
                var azure = new AzureOpenAIClient(server, key);
                client = azure.GetChatClient(Model);
                client = azure.GetChatClient(_model);
            }
            else
            {
                var openai = new OpenAIClient(key, new() { Endpoint = server });
                client = openai.GetChatClient(Model);
                client = openai.GetChatClient(_model);
            }

            var messages = new List<ChatMessage>();
            messages.Add(_model.Equals("o1-mini", StringComparison.Ordinal) ? new UserChatMessage(prompt) : new SystemChatMessage(prompt));
            messages.Add(new UserChatMessage(question));

            try
            {
                var updates = client.CompleteChatStreaming([
                    _model.Equals("o1-mini", StringComparison.Ordinal) ? new UserChatMessage(prompt) : new SystemChatMessage(prompt),
                    new UserChatMessage(question),
                ], null, cancellation);
                var rsp = new OpenAIResponse(onUpdate);

                foreach (var update in updates)
                if (_streaming)
                {
                    if (update.ContentUpdate.Count > 0)
                        onUpdate.Invoke(update.ContentUpdate[0].Text);
                    var updates = client.CompleteChatStreaming(messages, null, cancellation);

                    foreach (var update in updates)
                    {
                        if (update.ContentUpdate.Count > 0)
                            rsp.Append(update.ContentUpdate[0].Text);
                    }
                }
                else
                {
                    var completion = client.CompleteChat(messages, null, cancellation);

                    if (completion.Value.Content.Count > 0)
                        rsp.Append(completion.Value.Content[0].Text);
                }

                rsp.End();
            }
            catch
            {
@@ -127,6 +236,7 @@ namespace SourceGit.Models
        private string _server;
        private string _apiKey;
        private string _model;
        private bool _streaming = true;
        private string _analyzeDiffPrompt;
        private string _generateSubjectPrompt;
    }
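A hedged usage sketch of the reworked Chat call (all values below are placeholders, and the Server/ApiKey/Name setters are assumed from the _server/_apiKey fields rather than shown in this diff): with Streaming disabled the service falls back to a single CompleteChat request instead of CompleteChatStreaming, and in both branches the text passes through OpenAIResponse before reaching onUpdate.

    var service = new Models.OpenAIService
    {
        Name = "example",                      // placeholder configuration
        Server = "https://api.openai.com/v1",
        ApiKey = "sk-...",
        Model = "gpt-4o-mini",
        Streaming = false,                     // blocking path for services without streaming support
    };

    service.Chat(
        "You write a concise commit subject from change summaries.",   // placeholder prompt
        "Here are the summaries changes:\n- fix a typo in README",
        CancellationToken.None,
        text => Console.Write(text));          // already stripped of any <think>...</think> block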
@@ -459,6 +459,7 @@
    <x:String x:Key="Text.Preferences.AI.Model" xml:space="preserve">Model</x:String>
    <x:String x:Key="Text.Preferences.AI.Name" xml:space="preserve">Name</x:String>
    <x:String x:Key="Text.Preferences.AI.Server" xml:space="preserve">Server</x:String>
    <x:String x:Key="Text.Preferences.AI.Streaming" xml:space="preserve">Enable Streaming</x:String>
    <x:String x:Key="Text.Preferences.Appearance" xml:space="preserve">APPEARANCE</x:String>
    <x:String x:Key="Text.Preferences.Appearance.DefaultFont" xml:space="preserve">Default Font</x:String>
    <x:String x:Key="Text.Preferences.Appearance.FontSize" xml:space="preserve">Font Size</x:String>
@@ -462,6 +462,7 @@
    <x:String x:Key="Text.Preferences.AI.Model" xml:space="preserve">模型</x:String>
    <x:String x:Key="Text.Preferences.AI.Name" xml:space="preserve">配置名称</x:String>
    <x:String x:Key="Text.Preferences.AI.Server" xml:space="preserve">服务地址</x:String>
    <x:String x:Key="Text.Preferences.AI.Streaming" xml:space="preserve">启用流式输出</x:String>
    <x:String x:Key="Text.Preferences.Appearance" xml:space="preserve">外观配置</x:String>
    <x:String x:Key="Text.Preferences.Appearance.DefaultFont" xml:space="preserve">缺省字体</x:String>
    <x:String x:Key="Text.Preferences.Appearance.FontSize" xml:space="preserve">字体大小</x:String>
@@ -456,12 +456,13 @@
    <x:String x:Key="Text.Period.YearsAgo" xml:space="preserve">{0} 年前</x:String>
    <x:String x:Key="Text.Preferences" xml:space="preserve">偏好設定</x:String>
    <x:String x:Key="Text.Preferences.AI" xml:space="preserve">AI</x:String>
    <x:String x:Key="Text.Preferences.AI.Server" xml:space="preserve">伺服器</x:String>
    <x:String x:Key="Text.Preferences.AI.AnalyzeDiffPrompt" xml:space="preserve">分析變更差異提示詞</x:String>
    <x:String x:Key="Text.Preferences.AI.ApiKey" xml:space="preserve">API 金鑰</x:String>
    <x:String x:Key="Text.Preferences.AI.GenerateSubjectPrompt" xml:space="preserve">產生提交訊息提示詞</x:String>
    <x:String x:Key="Text.Preferences.AI.Model" xml:space="preserve">模型</x:String>
    <x:String x:Key="Text.Preferences.AI.Name" xml:space="preserve">名稱</x:String>
    <x:String x:Key="Text.Preferences.AI.AnalyzeDiffPrompt" xml:space="preserve">分析變更差異提示詞</x:String>
    <x:String x:Key="Text.Preferences.AI.GenerateSubjectPrompt" xml:space="preserve">產生提交訊息提示詞</x:String>
    <x:String x:Key="Text.Preferences.AI.Server" xml:space="preserve">伺服器</x:String>
    <x:String x:Key="Text.Preferences.AI.Streaming" xml:space="preserve">啟用串流輸出</x:String>
    <x:String x:Key="Text.Preferences.Appearance" xml:space="preserve">外觀設定</x:String>
    <x:String x:Key="Text.Preferences.Appearance.DefaultFont" xml:space="preserve">預設字型</x:String>
    <x:String x:Key="Text.Preferences.Appearance.FontSize" xml:space="preserve">字型大小</x:String>
@@ -616,6 +616,10 @@
                         Text="{Binding GenerateSubjectPrompt, Mode=TwoWay}"
                         AcceptsReturn="true"
                         TextWrapping="Wrap"/>

                <CheckBox Margin="0,12,0,0"
                          Content="{DynamicResource Text.Preferences.AI.Streaming}"
                          IsChecked="{Binding Streaming, Mode=TwoWay}"/>
            </StackPanel>
        </DataTemplate>
    </ContentControl.DataTemplates>