refactor: move Models.AIProvider to AI.Service

Signed-off-by: leo <longshuang@msn.cn>
This commit is contained in:
leo
2026-03-25 11:21:22 +08:00
parent 420676445b
commit f2a2c09b18
9 changed files with 107 additions and 105 deletions

84
src/AI/Agent.cs Normal file
View File

@@ -0,0 +1,84 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
using OpenAI;
using OpenAI.Chat;
namespace SourceGit.AI
{
    /// <summary>
    /// Drives an OpenAI-compatible chat-completion loop (including tool calls)
    /// to generate a conventional commit message for a repository's pending changes.
    /// </summary>
    public class Agent
    {
        /// <summary>
        /// Creates an agent bound to the given AI service configuration.
        /// </summary>
        /// <param name="service">Provider settings (server, model, credential, extra prompt).</param>
        public Agent(Service service)
        {
            ArgumentNullException.ThrowIfNull(service);
            _service = service;
        }

        /// <summary>
        /// Asks the configured model to generate a conventional commit message.
        /// Status lines and the final answer are streamed through <paramref name="onUpdate"/>.
        /// </summary>
        /// <param name="repo">Path of the git repository.</param>
        /// <param name="changeList">Pre-formatted list of changed files.</param>
        /// <param name="onUpdate">Callback invoked with incremental status/output text; may be null.</param>
        /// <param name="cancellation">Token used to abort the chat round-trips.</param>
        /// <exception cref="Exception">Thrown when the response is truncated or content-filtered.</exception>
        public async Task GenerateCommitMessage(string repo, string changeList, Action<string> onUpdate, CancellationToken cancellation)
        {
            var endPoint = new Uri(_service.Server);

            // Azure-hosted deployments require the dedicated Azure client; every other
            // endpoint is assumed to speak the plain OpenAI protocol.
            var client = _service.Server.Contains("openai.azure.com/", StringComparison.Ordinal)
                ? new AzureOpenAIClient(endPoint, _service.Credential)
                : new OpenAIClient(_service.Credential, new() { Endpoint = endPoint });

            var chatClient = client.GetChatClient(_service.Model);

            // Expose a tool the model can call to read per-file diffs on demand.
            var options = new ChatCompletionOptions() { Tools = { ChatTools.Tool_GetDetailChangesInFile } };

            var userMessageBuilder = new StringBuilder();
            userMessageBuilder
                .AppendLine("Generate a commit message (follow the rule of conventional commit message) for given git repository.")
                .AppendLine("- Read all given changed files before generating. Do not skip any one file.")
                .AppendLine("- Output the conventional commit message (with detail changes in list) directly. Do not explain your output nor introduce your answer.")
                .AppendLine(string.IsNullOrEmpty(_service.AdditionalPrompt) ? string.Empty : _service.AdditionalPrompt)
                .Append("Repository path: ").AppendLine(repo.Quoted()) // fixed typo: "Reposiory"
                .AppendLine("Changed files: ")
                .Append(changeList);

            var messages = new List<ChatMessage>() { new UserChatMessage(userMessageBuilder.ToString()) };

            // Conversation loop: keep exchanging messages until the model produces a
            // final answer, requests tools, or fails.
            while (true)
            {
                ChatCompletion completion = await chatClient.CompleteChatAsync(messages, options, cancellation);

                switch (completion.FinishReason)
                {
                    case ChatFinishReason.Stop:
                        onUpdate?.Invoke(string.Empty);
                        onUpdate?.Invoke("[Assistant]:");
                        onUpdate?.Invoke(completion.Content.Count > 0
                            ? completion.Content[0].Text
                            : "[No content was generated.]");
                        return;

                    case ChatFinishReason.ToolCalls:
                        // Echo the assistant turn, execute each requested tool and feed
                        // the results back, then let the model continue the conversation.
                        messages.Add(new AssistantChatMessage(completion));
                        foreach (var call in completion.ToolCalls)
                        {
                            var result = await ChatTools.Process(call, onUpdate);
                            messages.Add(result);
                        }
                        break;

                    case ChatFinishReason.Length:
                        throw new Exception("The response was cut off because it reached the maximum length. Consider increasing the max tokens limit.");

                    case ChatFinishReason.ContentFilter:
                        throw new Exception("Omitted content due to a content filter flag"); // fixed typo: "Ommitted"

                    default:
                        return;
                }
            }
        }

        private readonly Service _service;
    }
}

View File

@@ -25,7 +25,7 @@ namespace SourceGit.AI
},
"originalFile": {
"type": "string",
"description": "The path to the original file when it has been renamed."
"description": "The path to the original file when it has been renamed (marked as 'R' or 'C')."
}
},
"required": ["repo", "file"]

View File

@@ -1,87 +1,16 @@
using System;
using System.ClientModel;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
using OpenAI;
using OpenAI.Chat;
// NOTE(review): this span is a rendered diff hunk of src/AI/Service.cs with +/-
// markers stripped — it overlays the removed implementation (constructor,
// GenerateCommitMessage, _ai field) with the added configuration properties.
// Presumably the final file keeps only the properties; confirm against the repo.
namespace SourceGit.AI
{
// Configuration of an OpenAI-compatible service (replaces Models.AIProvider in this commit).
public class Service
{
// (removed in this commit) Wrapped a Models.AIProvider configuration.
public Service(Models.AIProvider ai)
{
_ai = ai;
}
// (moved to AI.Agent in this commit) Generates a conventional commit message
// via a chat-completion loop, streaming output through onUpdate.
public async Task GenerateCommitMessage(string repo, string changeList, Action<string> onUpdate, CancellationToken cancellation)
{
// Resolve the API key either literally or from an environment variable.
var key = _ai.ReadApiKeyFromEnv ? Environment.GetEnvironmentVariable(_ai.ApiKey) : _ai.ApiKey;
var endPoint = new Uri(_ai.Server);
var credential = new ApiKeyCredential(key);
// Azure-hosted deployments need the dedicated Azure client.
var client = _ai.Server.Contains("openai.azure.com/", StringComparison.Ordinal)
? new AzureOpenAIClient(endPoint, credential)
: new OpenAIClient(credential, new() { Endpoint = endPoint });
var chatClient = client.GetChatClient(_ai.Model);
// Let the model call a tool to read per-file diffs on demand.
var options = new ChatCompletionOptions() { Tools = { ChatTools.Tool_GetDetailChangesInFile } };
var userMessageBuilder = new StringBuilder();
userMessageBuilder
.AppendLine("Generate a commit message (follow the rule of conventional commit message) for given git repository.")
.AppendLine("- Read all given changed files before generating. Do not skip any one file.")
.AppendLine("- Output the conventional commit message (with detail changes in list) directly. Do not explain your output nor introduce your answer.")
.AppendLine(string.IsNullOrEmpty(_ai.AdditionalPrompt) ? string.Empty : _ai.AdditionalPrompt)
.Append("Reposiory path: ").AppendLine(repo.Quoted())
.AppendLine("Changed files: ")
.Append(changeList);
var messages = new List<ChatMessage>() { new UserChatMessage(userMessageBuilder.ToString()) };
// Conversation loop: repeats while the model keeps requesting tool calls.
do
{
ChatCompletion completion = await chatClient.CompleteChatAsync(messages, options, cancellation);
var inProgress = false;
switch (completion.FinishReason)
{
case ChatFinishReason.Stop:
onUpdate?.Invoke(string.Empty);
onUpdate?.Invoke("[Assistant]:");
if (completion.Content.Count > 0)
onUpdate?.Invoke(completion.Content[0].Text);
else
onUpdate?.Invoke("[No content was generated.]");
break;
case ChatFinishReason.Length:
throw new Exception("The response was cut off because it reached the maximum length. Consider increasing the max tokens limit.");
case ChatFinishReason.ToolCalls:
{
// Run each requested tool and append its result so the model can continue.
messages.Add(new AssistantChatMessage(completion));
foreach (var call in completion.ToolCalls)
{
var result = await ChatTools.Process(call, onUpdate);
messages.Add(result);
}
inProgress = true;
break;
}
case ChatFinishReason.ContentFilter:
throw new Exception("Ommitted content due to a content filter flag");
default:
break;
}
if (!inProgress)
break;
} while (true);
}
// (removed in this commit) Backing configuration object.
private readonly Models.AIProvider _ai;
// User-visible display name for this service.
public string Name { get; set; }
// Server endpoint URL.
public string Server { get; set; }
// Model (or Azure deployment) name.
public string Model { get; set; }
// API key, or the name of an environment variable holding it.
public string ApiKey { get; set; }
// When true, ApiKey names an environment variable to read the key from.
public bool ReadApiKeyFromEnv { get; set; }
// Extra instructions appended to the generation prompt.
public string AdditionalPrompt { get; set; }
// Builds a fresh credential on each access, resolving the key as configured.
public ApiKeyCredential Credential => new ApiKeyCredential(ReadApiKeyFromEnv ? Environment.GetEnvironmentVariable(ApiKey) : ApiKey);
}
}

View File

@@ -1,12 +0,0 @@
namespace SourceGit.Models
{
/// <summary>
/// Settings of an OpenAI-compatible provider. This hunk records the file's
/// deletion — the type is replaced by SourceGit.AI.Service in this commit.
/// </summary>
public class AIProvider
{
/// <summary>User-visible display name.</summary>
public string Name { get; set; }
/// <summary>Server endpoint URL.</summary>
public string Server { get; set; }
/// <summary>Model (or deployment) name.</summary>
public string Model { get; set; }
/// <summary>API key, or the name of an environment variable holding it.</summary>
public string ApiKey { get; set; }
/// <summary>When true, ApiKey names an environment variable to read the key from.</summary>
public bool ReadApiKeyFromEnv { get; set; }
/// <summary>Extra instructions appended to the generation prompt.</summary>
public string AdditionalPrompt { get; set; }
}
}

View File

@@ -24,10 +24,10 @@ namespace SourceGit.ViewModels
private set => SetProperty(ref _text, value);
}
public AIAssistant(string repo, Models.AIProvider provider, List<Models.Change> changes)
public AIAssistant(string repo, AI.Service service, List<Models.Change> changes)
{
_repo = repo;
_provider = provider;
_service = service;
_cancel = new CancellationTokenSource();
var builder = new StringBuilder();
@@ -80,14 +80,14 @@ namespace SourceGit.ViewModels
_cancel = new CancellationTokenSource();
Task.Run(async () =>
{
var server = new AI.Service(_provider);
var agent = new AI.Agent(_service);
var builder = new StringBuilder();
builder.AppendLine("Asking AI to generate commit message...").AppendLine();
Dispatcher.UIThread.Post(() => Text = builder.ToString());
try
{
await server.GenerateCommitMessage(_repo, _changeList, message =>
await agent.GenerateCommitMessage(_repo, _changeList, message =>
{
builder.AppendLine(message);
Dispatcher.UIThread.Post(() => Text = builder.ToString());
@@ -103,7 +103,7 @@ namespace SourceGit.ViewModels
}
private readonly string _repo = null;
private readonly Models.AIProvider _provider = null;
private readonly AI.Service _service = null;
private readonly string _changeList = null;
private CancellationTokenSource _cancel = null;
private bool _isGenerating = false;

View File

@@ -480,7 +480,7 @@ namespace SourceGit.ViewModels
set;
} = [];
public AvaloniaList<Models.AIProvider> OpenAIServices
public AvaloniaList<AI.Service> OpenAIServices
{
get;
set;

View File

@@ -1599,7 +1599,7 @@ namespace SourceGit.ViewModels
log.Complete();
}
public List<Models.AIProvider> GetPreferredOpenAIServices()
public List<AI.Service> GetPreferredOpenAIServices()
{
var services = Preferences.Instance.OpenAIServices;
if (services == null || services.Count == 0)
@@ -1609,7 +1609,7 @@ namespace SourceGit.ViewModels
return [services[0]];
var preferred = _settings.PreferredOpenAIService;
var all = new List<Models.AIProvider>();
var all = new List<AI.Service>();
foreach (var service in services)
{
if (service.Name.Equals(preferred, StringComparison.Ordinal))

View File

@@ -3,6 +3,7 @@
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:s="using:SourceGit"
xmlns:ai="using:SourceGit.AI"
xmlns:m="using:SourceGit.Models"
xmlns:c="using:SourceGit.Converters"
xmlns:vm="using:SourceGit.ViewModels"
@@ -822,7 +823,7 @@
</ListBox.ItemsPanel>
<ListBox.ItemTemplate>
<DataTemplate DataType="m:AIProvider">
<DataTemplate DataType="ai:Service">
<Grid ColumnDefinitions="Auto,*">
<Path Grid.Column="0" Width="14" Height="14" Data="{StaticResource Icons.AIAssist}"/>
<TextBlock Grid.Column="1" Text="{Binding Name}" Margin="8,0" TextTrimming="CharacterEllipsis"/>
@@ -859,7 +860,7 @@
</ContentControl.Content>
<ContentControl.DataTemplates>
<DataTemplate DataType="m:AIProvider">
<DataTemplate DataType="ai:Service">
<StackPanel Orientation="Vertical" MaxWidth="680">
<TextBlock Text="{DynamicResource Text.Preferences.AI.Name}"/>
<TextBox Margin="0,4,0,0" CornerRadius="3" Height="28" Text="{Binding Name, Mode=TwoWay}"/>

View File

@@ -95,10 +95,10 @@ namespace SourceGit.Views
set;
} = false;
public static readonly StyledProperty<Models.AIProvider> SelectedOpenAIServiceProperty =
AvaloniaProperty.Register<Preferences, Models.AIProvider>(nameof(SelectedOpenAIService));
public static readonly StyledProperty<AI.Service> SelectedOpenAIServiceProperty =
AvaloniaProperty.Register<Preferences, AI.Service>(nameof(SelectedOpenAIService));
public Models.AIProvider SelectedOpenAIService
public AI.Service SelectedOpenAIService
{
get => GetValue(SelectedOpenAIServiceProperty);
set => SetValue(SelectedOpenAIServiceProperty, value);
@@ -397,7 +397,7 @@ namespace SourceGit.Views
private void OnAddOpenAIService(object sender, RoutedEventArgs e)
{
var service = new Models.AIProvider() { Name = "Unnamed Service" };
var service = new AI.Service() { Name = "Unnamed Service" };
ViewModels.Preferences.Instance.OpenAIServices.Add(service);
SelectedOpenAIService = service;