enhance: added cancellation logic to the OpenAI chat response API call.

leo 2024-09-13 14:25:38 +08:00
parent f7f549f86d
commit 886b242b66
2 changed files with 22 additions and 11 deletions


@@ -78,7 +78,7 @@ namespace SourceGit.Commands
             prompt.AppendLine("- Simply describe the MAIN GOAL of the changes.");
             prompt.AppendLine("- Output directly the summary in plain text.`");
-            var rsp = Models.OpenAI.Chat(prompt.ToString(), $"Here is the `git diff` output: {diff}");
+            var rsp = Models.OpenAI.Chat(prompt.ToString(), $"Here is the `git diff` output: {diff}", _cancelToken);
             if (rsp != null && rsp.Choices.Count > 0)
                 return rsp.Choices[0].Message.Content;
@@ -104,7 +104,7 @@ namespace SourceGit.Commands
             prompt.AppendLine("- Output directly only one commit message in plain text with the next format: {type}: {commit_message}.");
             prompt.AppendLine("- Be as concise as possible, keep the message under 50 characters.");
-            var rsp = Models.OpenAI.Chat(prompt.ToString(), $"Here are the summaries changes: {summary}");
+            var rsp = Models.OpenAI.Chat(prompt.ToString(), $"Here are the summaries changes: {summary}", _cancelToken);
             if (rsp != null && rsp.Choices.Count > 0)
                 return rsp.Choices[0].Message.Content;
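The `_cancelToken` forwarded in both calls above is not defined in this diff; presumably the command object receives a CancellationToken from its caller and keeps it in a field. A minimal sketch of that wiring, assuming a constructor parameter and a private field (the class and member names here are illustrative, not taken from this commit):

// Sketch only: stands in for the command class that issues the OpenAI calls above.
using System.Threading;

namespace SourceGit.Commands
{
    public class GenerateCommitMessageSketch
    {
        // Stored once and reused for every OpenAI request this command issues.
        private readonly CancellationToken _cancelToken;

        public GenerateCommitMessageSketch(CancellationToken cancelToken)
        {
            _cancelToken = cancelToken;
        }
    }
}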


@@ -3,6 +3,7 @@ using System.Collections.Generic;
 using System.Net.Http;
 using System.Text.Json;
 using System.Text.Json.Serialization;
+using System.Threading;
 
 namespace SourceGit.Models
 {
@@ -97,7 +98,7 @@ namespace SourceGit.Models
             get => !string.IsNullOrEmpty(Server) && !string.IsNullOrEmpty(ApiKey) && !string.IsNullOrEmpty(Model);
         }
 
-        public static OpenAIChatResponse Chat(string prompt, string question)
+        public static OpenAIChatResponse Chat(string prompt, string question, CancellationToken cancellation)
         {
             var chat = new OpenAIChatRequest() { Model = Model };
             chat.AddMessage("system", prompt);
@@ -107,17 +108,27 @@ namespace SourceGit.Models
             client.DefaultRequestHeaders.Add("Authorization", $"Bearer {ApiKey}");
 
             var req = new StringContent(JsonSerializer.Serialize(chat, JsonCodeGen.Default.OpenAIChatRequest));
-            var task = client.PostAsync(Server, req);
-            task.Wait();
+            try
+            {
+                var task = client.PostAsync(Server, req, cancellation);
+                task.Wait();
 
-            var rsp = task.Result;
-            if (!rsp.IsSuccessStatusCode)
-                throw new Exception($"AI service returns error code {rsp.StatusCode}");
+                var rsp = task.Result;
+                if (!rsp.IsSuccessStatusCode)
+                    throw new Exception($"AI service returns error code {rsp.StatusCode}");
 
-            var reader = rsp.Content.ReadAsStringAsync();
-            reader.Wait();
+                var reader = rsp.Content.ReadAsStringAsync(cancellation);
+                reader.Wait();
 
-            return JsonSerializer.Deserialize(reader.Result, JsonCodeGen.Default.OpenAIChatResponse);
+                return JsonSerializer.Deserialize(reader.Result, JsonCodeGen.Default.OpenAIChatResponse);
+            }
+            catch
+            {
+                if (cancellation.IsCancellationRequested)
+                    return null;
+                throw;
+            }
         }
     }
 }
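On the caller side, the new parameter would typically be driven by a CancellationTokenSource, and a cancelled request now surfaces as a null response rather than an exception. A rough usage sketch under those assumptions (prompt text, timeout, and variable names are illustrative, and OpenAI.Server / ApiKey / Model are assumed to be configured already):

// Caller-side sketch, not part of this commit.
using System;
using System.Threading;
using SourceGit.Models;

using var cts = new CancellationTokenSource();
cts.CancelAfter(TimeSpan.FromSeconds(30)); // or call cts.Cancel() when the user aborts

// Chat(...) swallows the failure and returns null once the token is cancelled,
// so callers can treat null as "no message produced".
var rsp = OpenAI.Chat("You are a commit message assistant.", "Summarize this diff.", cts.Token);
if (rsp != null && rsp.Choices.Count > 0)
    Console.WriteLine(rsp.Choices[0].Message.Content);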