refactor: use non-streaming chat API

Signed-off-by: leo <longshuang@msn.cn>
This commit is contained in:
leo
2026-03-24 19:19:47 +08:00
parent 86c481d41f
commit 7758e57d7f
4 changed files with 30 additions and 161 deletions

View File

@@ -36,14 +36,14 @@ The project uses the following third-party libraries or assets
### OpenAI .NET SDK
- **Source**: https://github.com/openai/openai-dotnet
- **Version**: 2.8.0
- **Version**: 2.9.1
- **License**: MIT License
- **License Link**: https://github.com/openai/openai-dotnet/blob/main/LICENSE
### Azure.AI.OpenAI
- **Source**: https://github.com/Azure/azure-sdk-for-net
- **Version**: 2.8.0-beta.1
- **Version**: 2.9.0-beta.1
- **License**: MIT License
- **License Link**: https://github.com/Azure/azure-sdk-for-net/blob/main/LICENSE.txt

View File

@@ -33,6 +33,7 @@ namespace SourceGit.AI
userMessageBuilder
.AppendLine("Generate a commit message (follow the rule of conventional commit message) for given git repository.")
.AppendLine("- Read all given changed files before generating. Do not skip any one file.")
.AppendLine("- Output the conventional commit message (with detail changes in list) directly. Do not explain your output nor introduce your answer.")
.AppendLine(string.IsNullOrEmpty(_ai.AdditionalPrompt) ? string.Empty : _ai.AdditionalPrompt)
.Append("Reposiory path: ").AppendLine(repo.Quoted())
.AppendLine("Changed files: ")
@@ -42,51 +43,38 @@ namespace SourceGit.AI
do
{
ChatCompletion completion = await chatClient.CompleteChatAsync(messages, options, cancellation);
var inProgress = false;
var updates = chatClient.CompleteChatStreamingAsync(messages, options).WithCancellation(cancellation);
var toolCalls = new ToolCallsBuilder();
var contentBuilder = new StringBuilder();
await foreach (var update in updates)
switch (completion.FinishReason)
{
foreach (var contentPart in update.ContentUpdate)
contentBuilder.Append(contentPart.Text);
case ChatFinishReason.Stop:
onUpdate?.Invoke(string.Empty);
onUpdate?.Invoke("[Assistant]:");
if (completion.Content.Count > 0)
onUpdate?.Invoke(completion.Content[0].Text);
else
onUpdate?.Invoke("[No content was generated.]");
break;
case ChatFinishReason.Length:
throw new Exception("The response was cut off because it reached the maximum length. Consider increasing the max tokens limit.");
case ChatFinishReason.ToolCalls:
{
messages.Add(new AssistantChatMessage(completion));
foreach (var toolCall in update.ToolCallUpdates)
toolCalls.Append(toolCall);
switch (update.FinishReason)
{
case ChatFinishReason.Stop:
onUpdate?.Invoke(string.Empty);
onUpdate?.Invoke("[Assistant]:");
onUpdate?.Invoke(contentBuilder.ToString());
break;
case ChatFinishReason.Length:
throw new Exception("The response was cut off because it reached the maximum length. Consider increasing the max tokens limit.");
case ChatFinishReason.ToolCalls:
foreach (var call in completion.ToolCalls)
{
var calls = toolCalls.Build();
var assistantMessage = new AssistantChatMessage(calls);
if (contentBuilder.Length > 0)
assistantMessage.Content.Add(ChatMessageContentPart.CreateTextPart(contentBuilder.ToString()));
messages.Add(assistantMessage);
foreach (var call in calls)
{
var result = await ChatTools.Process(call, onUpdate);
messages.Add(result);
}
inProgress = true;
break;
var result = await ChatTools.Process(call, onUpdate);
messages.Add(result);
}
case ChatFinishReason.ContentFilter:
throw new Exception("Ommitted content due to a content filter flag");
default:
break;
}
inProgress = true;
break;
}
case ChatFinishReason.ContentFilter:
throw new Exception("Ommitted content due to a content filter flag");
default:
break;
}
if (!inProgress)

View File

@@ -1,119 +0,0 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Diagnostics;
using OpenAI.Chat;
namespace SourceGit.AI
{
public class ToolCallsBuilder
{
    // Streamed tool-call fragments accumulated per update Index:
    // the call id, the function name, and the raw UTF-8 argument bytes.
    private readonly Dictionary<int, string> _indexToToolCallId = [];
    private readonly Dictionary<int, string> _indexToFunctionName = [];
    private readonly Dictionary<int, SequenceBuilder<byte>> _indexToFunctionArguments = [];

    /// <summary>
    /// Folds one streaming update into the builder. Every field of the update is
    /// optional; only the pieces that are present get recorded for its Index.
    /// </summary>
    public void Append(StreamingChatToolCallUpdate toolCallUpdate)
    {
        if (toolCallUpdate.ToolCallId != null)
        {
            _indexToToolCallId[toolCallUpdate.Index] = toolCallUpdate.ToolCallId;
        }

        if (toolCallUpdate.FunctionName != null)
        {
            _indexToFunctionName[toolCallUpdate.Index] = toolCallUpdate.FunctionName;
        }

        if (toolCallUpdate.FunctionArgumentsUpdate != null && !toolCallUpdate.FunctionArgumentsUpdate.ToMemory().IsEmpty)
        {
            if (!_indexToFunctionArguments.TryGetValue(toolCallUpdate.Index, out SequenceBuilder<byte> argumentsBuilder))
            {
                argumentsBuilder = new SequenceBuilder<byte>();
                _indexToFunctionArguments[toolCallUpdate.Index] = argumentsBuilder;
            }

            argumentsBuilder.Append(toolCallUpdate.FunctionArgumentsUpdate);
        }
    }

    /// <summary>
    /// Materializes every accumulated fragment into a complete tool-call list.
    /// </summary>
    public IReadOnlyList<ChatToolCall> Build()
    {
        List<ChatToolCall> toolCalls = [];

        foreach ((int index, string toolCallId) in _indexToToolCallId)
        {
            // A parameterless function never streams a FunctionArgumentsUpdate, so
            // this index may be absent. The previous direct indexer
            // (_indexToFunctionArguments[index]) threw KeyNotFoundException in that
            // case; fall back to an empty argument payload instead.
            byte[] argumentBytes = _indexToFunctionArguments.TryGetValue(index, out var argumentsBuilder)
                ? argumentsBuilder.Build().ToArray()
                : [];

            // NOTE(review): still assumes a FunctionName fragment arrives for every
            // ToolCallId — confirm against the streaming API contract.
            ChatToolCall toolCall = ChatToolCall.CreateFunctionToolCall(
                id: toolCallId,
                functionName: _indexToFunctionName[index],
                functionArguments: BinaryData.FromBytes(argumentBytes));

            toolCalls.Add(toolCall);
        }

        return toolCalls;
    }
}
public class SequenceBuilder<T>
{
    // Head and tail of a singly-linked chain of memory segments, appended in order.
    Segment _head;
    Segment _tail;

    /// <summary>
    /// Adds one chunk of memory to the end of the sequence being built.
    /// </summary>
    public void Append(ReadOnlyMemory<T> data)
    {
        if (_head != null)
        {
            // Chain already started: link a new segment after the tail.
            _tail = _tail!.Append(data);
            return;
        }

        Debug.Assert(_tail == null);
        _head = new Segment(data);
        _tail = _head;
    }

    /// <summary>
    /// Produces a <see cref="ReadOnlySequence{T}"/> spanning every chunk appended so far.
    /// </summary>
    public ReadOnlySequence<T> Build()
    {
        if (_head == null)
        {
            // Nothing was ever appended.
            Debug.Assert(_tail == null);
            return ReadOnlySequence<T>.Empty;
        }

        if (ReferenceEquals(_head, _tail))
        {
            // Exactly one chunk: wrap its memory directly.
            Debug.Assert(_head.Next == null);
            return new ReadOnlySequence<T>(_head.Memory);
        }

        return new ReadOnlySequence<T>(_head, 0, _tail!, _tail!.Memory.Length);
    }

    private sealed class Segment : ReadOnlySequenceSegment<T>
    {
        public Segment(ReadOnlyMemory<T> items) : this(items, 0)
        {
        }

        private Segment(ReadOnlyMemory<T> items, long runningIndex)
        {
            Debug.Assert(runningIndex >= 0);
            Memory = items;
            RunningIndex = runningIndex;
        }

        /// <summary>
        /// Links a new segment after this one, carrying forward the running index.
        /// </summary>
        public Segment Append(ReadOnlyMemory<T> items)
        {
            long nextRunningIndex;
            checked
            {
                // Overflow here would corrupt sequence offsets, so trap it explicitly.
                nextRunningIndex = RunningIndex + Memory.Length;
            }

            var next = new Segment(items, nextRunningIndex);
            Next = next;
            return next;
        }
    }
}
}

View File

@@ -53,11 +53,11 @@
<PackageReference Include="Avalonia.Fonts.Inter" Version="11.3.12" />
<PackageReference Include="Avalonia.Themes.Fluent" Version="11.3.12" />
<PackageReference Include="Avalonia.Diagnostics" Version="11.3.12" Condition="'$(Configuration)' == 'Debug'" />
<PackageReference Include="Azure.AI.OpenAI" Version="2.8.0-beta.1" />
<PackageReference Include="Azure.AI.OpenAI" Version="2.9.0-beta.1" />
<PackageReference Include="BitMiracle.LibTiff.NET" Version="2.4.660" />
<PackageReference Include="CommunityToolkit.Mvvm" Version="8.4.0" />
<PackageReference Include="LiveChartsCore.SkiaSharpView.Avalonia" Version="2.0.0-rc6.1" />
<PackageReference Include="OpenAI" Version="2.8.0" />
<PackageReference Include="OpenAI" Version="2.9.1" />
<PackageReference Include="Pfim" Version="0.11.4" />
<ProjectReference Include="../depends/AvaloniaEdit/src/AvaloniaEdit.TextMate/AvaloniaEdit.TextMate.csproj" />