enhance: added cancellation logic to the OpenAI chat response API call.

This commit is contained in:
leo 2024-09-13 14:25:38 +08:00
parent f7f549f86d
commit 886b242b66
No known key found for this signature in database
2 changed files with 22 additions and 11 deletions

View file

@@ -78,7 +78,7 @@ namespace SourceGit.Commands
prompt.AppendLine("- Simply describe the MAIN GOAL of the changes."); prompt.AppendLine("- Simply describe the MAIN GOAL of the changes.");
prompt.AppendLine("- Output directly the summary in plain text.`"); prompt.AppendLine("- Output directly the summary in plain text.`");
var rsp = Models.OpenAI.Chat(prompt.ToString(), $"Here is the `git diff` output: {diff}"); var rsp = Models.OpenAI.Chat(prompt.ToString(), $"Here is the `git diff` output: {diff}", _cancelToken);
if (rsp != null && rsp.Choices.Count > 0) if (rsp != null && rsp.Choices.Count > 0)
return rsp.Choices[0].Message.Content; return rsp.Choices[0].Message.Content;
@@ -104,7 +104,7 @@ namespace SourceGit.Commands
prompt.AppendLine("- Output directly only one commit message in plain text with the next format: {type}: {commit_message}."); prompt.AppendLine("- Output directly only one commit message in plain text with the next format: {type}: {commit_message}.");
prompt.AppendLine("- Be as concise as possible, keep the message under 50 characters."); prompt.AppendLine("- Be as concise as possible, keep the message under 50 characters.");
var rsp = Models.OpenAI.Chat(prompt.ToString(), $"Here are the summaries changes: {summary}"); var rsp = Models.OpenAI.Chat(prompt.ToString(), $"Here are the summaries changes: {summary}", _cancelToken);
if (rsp != null && rsp.Choices.Count > 0) if (rsp != null && rsp.Choices.Count > 0)
return rsp.Choices[0].Message.Content; return rsp.Choices[0].Message.Content;

View file

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Net.Http; using System.Net.Http;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization; using System.Text.Json.Serialization;
using System.Threading;
namespace SourceGit.Models namespace SourceGit.Models
{ {
@@ -97,7 +98,7 @@ namespace SourceGit.Models
get => !string.IsNullOrEmpty(Server) && !string.IsNullOrEmpty(ApiKey) && !string.IsNullOrEmpty(Model); get => !string.IsNullOrEmpty(Server) && !string.IsNullOrEmpty(ApiKey) && !string.IsNullOrEmpty(Model);
} }
public static OpenAIChatResponse Chat(string prompt, string question) public static OpenAIChatResponse Chat(string prompt, string question, CancellationToken cancellation)
{ {
var chat = new OpenAIChatRequest() { Model = Model }; var chat = new OpenAIChatRequest() { Model = Model };
chat.AddMessage("system", prompt); chat.AddMessage("system", prompt);
@@ -107,17 +108,27 @@ namespace SourceGit.Models
client.DefaultRequestHeaders.Add("Authorization", $"Bearer {ApiKey}"); client.DefaultRequestHeaders.Add("Authorization", $"Bearer {ApiKey}");
var req = new StringContent(JsonSerializer.Serialize(chat, JsonCodeGen.Default.OpenAIChatRequest)); var req = new StringContent(JsonSerializer.Serialize(chat, JsonCodeGen.Default.OpenAIChatRequest));
var task = client.PostAsync(Server, req); try
{
var task = client.PostAsync(Server, req, cancellation);
task.Wait(); task.Wait();
var rsp = task.Result; var rsp = task.Result;
if (!rsp.IsSuccessStatusCode) if (!rsp.IsSuccessStatusCode)
throw new Exception($"AI service returns error code {rsp.StatusCode}"); throw new Exception($"AI service returns error code {rsp.StatusCode}");
var reader = rsp.Content.ReadAsStringAsync(); var reader = rsp.Content.ReadAsStringAsync(cancellation);
reader.Wait(); reader.Wait();
return JsonSerializer.Deserialize(reader.Result, JsonCodeGen.Default.OpenAIChatResponse); return JsonSerializer.Deserialize(reader.Result, JsonCodeGen.Default.OpenAIChatResponse);
} }
catch
{
if (cancellation.IsCancellationRequested)
return null;
throw;
}
}
} }
} }