Mango.Nop.Plugins/Nop.Plugin.Misc.AIPlugin/Services/OpenAIApiService.cs

using Microsoft.Extensions.Configuration;
using Nop.Plugin.Misc.FruitBankPlugin.Models;
using Nop.Plugin.Misc.FruitBankPlugin.Services;
using Nop.Services.Configuration;
using System;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;

namespace Nop.Plugin.Misc.FruitBankPlugin.Services
{
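    /// <summary>
    /// OpenAI-backed implementation of <see cref="IAIAPIService"/>: plain and streamed chat completions,
    /// image generation and PDF analysis, using the API key and model name stored in <see cref="FruitBankSettings"/>.
    /// </summary>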
    public class OpenAIApiService : IAIAPIService
    {
        private readonly ISettingService _settingService;
        private readonly FruitBankSettings _fruitBankSettings;
        private readonly HttpClient _httpClient;

        // These callbacks are static, so handlers registered on any instance are shared by all instances.
        private static Action<string, string>? _callback;
        private static Action<string>? _onComplete;
        private static Action<string, string>? _onError;

        private const string OpenAiEndpoint = "https://api.openai.com/v1/chat/completions";
        private const string OpenAiImageEndpoint = "https://api.openai.com/v1/images/generations";
        private const string OpenAiFileEndpoint = "https://api.openai.com/v1/files";

        public OpenAIApiService(ISettingService settingService, HttpClient httpClient)
        {
            _settingService = settingService;
            _fruitBankSettings = _settingService.LoadSetting<FruitBankSettings>();
            _httpClient = httpClient;

            // Assign the Authorization header (rather than Add) so a reused HttpClient
            // does not accumulate duplicate Bearer headers.
            _httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", GetApiKey());
        }

        public string GetApiKey() => _fruitBankSettings.OpenAIApiKey;

        public string GetModelName() => _fruitBankSettings.OpenAIModelName;
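
        /// <summary>
        /// Registers the handlers used by <see cref="GetStreamedResponseAsync"/>:
        /// <paramref name="callback"/> receives (sessionId, textSoFar) for each streamed delta,
        /// <paramref name="onCompleteCallback"/> fires when the stream reports [DONE], and
        /// <paramref name="onErrorCallback"/> receives (sessionId, errorMessage) on failure.
        /// Example wiring (hypothetical consumer, e.g. a SignalR hub context):
        /// <code>
        /// aiService.RegisterCallback(
        ///     (sessionId, text) => hub.Clients.Group(sessionId).SendAsync("chunk", text),
        ///     sessionId => hub.Clients.Group(sessionId).SendAsync("done"),
        ///     (sessionId, error) => hub.Clients.Group(sessionId).SendAsync("error", error));
        /// </code>
        /// </summary>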
        public void RegisterCallback(Action<string, string> callback, Action<string> onCompleteCallback, Action<string, string> onErrorCallback)
        {
            _callback = callback;
            _onComplete = onCompleteCallback;
            _onError = onErrorCallback;
        }

        #region === CHAT (TEXT INPUT) ===
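        /// <summary>
        /// Sends a single, non-streamed chat completion request and returns the assistant's reply.
        /// Smaller models use a low temperature; other models use the GPT-5-style request shape
        /// with reasoning effort and verbosity options.
        /// </summary>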
        public async Task<string> GetSimpleResponseAsync(string systemMessage, string userMessage, string? assistantMessage = null)
        {
            var modelName = GetModelName();
            var serializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase };

            // Include the assistant turn only when the caller supplies one.
            var messages = string.IsNullOrEmpty(assistantMessage)
                ? new[]
                {
                    new AIChatMessage { Role = "system", Content = systemMessage },
                    new AIChatMessage { Role = "user", Content = userMessage }
                }
                : new[]
                {
                    new AIChatMessage { Role = "system", Content = systemMessage },
                    new AIChatMessage { Role = "assistant", Content = assistantMessage },
                    new AIChatMessage { Role = "user", Content = userMessage }
                };

            StringContent requestContent;
            if (modelName == "gpt-4.1-mini" || modelName == "gpt-4o-mini" || modelName == "gpt-4.1-nano" || modelName == "gpt-5-nano")
            {
                var requestBody = new OpenAIGpt4MiniAIChatRequest
                {
                    Model = modelName,
                    Temperature = 0.2,
                    Messages = messages,
                    Stream = false
                };
                requestContent = new StringContent(JsonSerializer.Serialize(requestBody, serializerOptions), Encoding.UTF8, "application/json");
            }
            else
            {
                var requestBody = new OpenAIGpt5AIChatRequest
                {
                    Model = modelName,
                    Temperature = 1,
                    Messages = messages,
                    ReasoningEffort = "minimal",
                    Verbosity = "high",
                    Stream = false
                };
                requestContent = new StringContent(JsonSerializer.Serialize(requestBody, serializerOptions), Encoding.UTF8, "application/json");
            }

            using var response = await _httpClient.PostAsync(OpenAiEndpoint, requestContent);
            response.EnsureSuccessStatusCode();

            using var responseStream = await response.Content.ReadAsStreamAsync();
            using var document = await JsonDocument.ParseAsync(responseStream);

            var usage = document.RootElement.GetProperty("usage");
            var inputTokens = usage.GetProperty("prompt_tokens").GetInt32();
            var outputTokens = usage.GetProperty("completion_tokens").GetInt32();
            Console.WriteLine($"USAGE STATS - Tokens: {inputTokens} + {outputTokens} = {inputTokens + outputTokens}");

            return document.RootElement
                .GetProperty("choices")[0]
                .GetProperty("message")
                .GetProperty("content")
                .GetString() ?? "No response";
        }
        #endregion

        #region === CHAT (STREAMING) ===
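        /// <summary>
        /// Streams a chat completion as server-sent events, invoking the registered callbacks with the
        /// accumulated text after each delta, and returns the full assistant response once the stream ends.
        /// </summary>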
        public async Task<string> GetStreamedResponseAsync(string sessionId, string systemMessage, string userMessage, string? assistantMessage = null)
        {
            var modelName = GetModelName();
            var serializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase };

            var messages = string.IsNullOrEmpty(assistantMessage)
                ? new[]
                {
                    new AIChatMessage { Role = "system", Content = systemMessage },
                    new AIChatMessage { Role = "user", Content = userMessage }
                }
                : new[]
                {
                    new AIChatMessage { Role = "system", Content = systemMessage },
                    new AIChatMessage { Role = "assistant", Content = assistantMessage },
                    new AIChatMessage { Role = "user", Content = userMessage }
                };

            StringContent requestContent;
            if (modelName == "gpt-4.1-mini" || modelName == "gpt-4o-mini" || modelName == "gpt-4.1-nano" || modelName == "gpt-5-nano")
            {
                var requestBody = new OpenAIGpt4MiniAIChatRequest
                {
                    Model = modelName,
                    Temperature = 0.2,
                    Messages = messages,
                    Stream = true
                };
                requestContent = new StringContent(JsonSerializer.Serialize(requestBody, serializerOptions), Encoding.UTF8, "application/json");
            }
            else
            {
                var requestBody = new OpenAIGpt5AIChatRequest
                {
                    Model = modelName,
                    Temperature = 1,
                    Messages = messages,
                    Stream = true
                };
                requestContent = new StringContent(JsonSerializer.Serialize(requestBody, serializerOptions), Encoding.UTF8, "application/json");
            }

            using var httpRequest = new HttpRequestMessage(HttpMethod.Post, OpenAiEndpoint)
            {
                Content = requestContent
            };
            using var response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseHeadersRead);
            response.EnsureSuccessStatusCode();

            var stringBuilder = new StringBuilder();
            using var responseStream = await response.Content.ReadAsStreamAsync();
            using var reader = new StreamReader(responseStream);

            try
            {
                var receivedDone = false;
                var reportedError = false;

                while (!reader.EndOfStream)
                {
                    // Each SSE line looks like "data: {json}"; the final line is "data: [DONE]".
                    var line = await reader.ReadLineAsync();
                    if (string.IsNullOrWhiteSpace(line) || !line.StartsWith("data: ")) continue;

                    var jsonResponse = line.Substring(6);
                    if (jsonResponse == "[DONE]")
                    {
                        receivedDone = true;
                        _onComplete?.Invoke(sessionId);
                        break;
                    }

                    try
                    {
                        using var jsonDoc = JsonDocument.Parse(jsonResponse);
                        if (jsonDoc.RootElement.TryGetProperty("choices", out var choices) &&
                            choices[0].TryGetProperty("delta", out var delta) &&
                            delta.TryGetProperty("content", out var contentElement))
                        {
                            var content = contentElement.GetString();
                            if (!string.IsNullOrEmpty(content))
                            {
                                // Report the accumulated text so far, not just the new delta.
                                stringBuilder.Append(content);
                                _callback?.Invoke(sessionId, stringBuilder.ToString());
                            }
                        }
                    }
                    catch (JsonException ex)
                    {
                        _onError?.Invoke(sessionId, $"Malformed JSON: {ex.Message}");
                        reportedError = true;
                        break;
                    }
                }

                // If the stream ended without a [DONE] marker and no error was already reported, flag the truncation.
                if (!receivedDone && !reportedError)
                {
                    _onError?.Invoke(sessionId, "Unexpected end of stream");
                }
            }
            catch (Exception ex)
            {
                _onError?.Invoke(sessionId, $"Exception: {ex.Message}");
            }

            return stringBuilder.ToString();
        }
        #endregion

        #region === IMAGE GENERATION ===
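        /// <summary>
        /// Generates a single 1024x1024 image with gpt-image-1 and returns it as a base64 PNG data URI,
        /// or null if the request fails.
        /// </summary>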
        public async Task<string?> GenerateImageAsync(string prompt)
        {
            using var request = new HttpRequestMessage(HttpMethod.Post, OpenAiImageEndpoint);
            var requestBody = new
            {
                model = "gpt-image-1",
                prompt,
                n = 1,
                size = "1024x1024"
            };
            request.Content = new StringContent(JsonSerializer.Serialize(requestBody), Encoding.UTF8, "application/json");

            using var response = await _httpClient.SendAsync(request);
            if (!response.IsSuccessStatusCode)
            {
                var error = await response.Content.ReadAsStringAsync();
                Console.WriteLine($"Image generation failed: {error}");
                return null;
            }

            // The API returns the image as base64; wrap it in a data URI so it can be used directly in markup.
            using var content = await response.Content.ReadAsStreamAsync();
            using var json = await JsonDocument.ParseAsync(content);
            var base64Image = json.RootElement
                .GetProperty("data")[0]
                .GetProperty("b64_json")
                .GetString();

            return $"data:image/png;base64,{base64Image}";
        }
        #endregion

        #region === PDF ANALYSIS (NEW) ===
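        /// <summary>
        /// Uploads a PDF to the OpenAI Files API with purpose "assistants", then asks the chat model
        /// about it using the file_search tool, and returns the model's reply.
        /// </summary>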
        public async Task<string?> AnalyzePdfAsync(string filePath, string userPrompt)
        {
            // Step 1: Upload the PDF to the Files API.
            using var form = new MultipartFormDataContent();
            using var fileStream = File.OpenRead(filePath);
            var fileContent = new StreamContent(fileStream);
            fileContent.Headers.ContentType = new MediaTypeHeaderValue("application/pdf");
            form.Add(fileContent, "file", Path.GetFileName(filePath));
            form.Add(new StringContent("assistants"), "purpose");

            using var uploadResponse = await _httpClient.PostAsync(OpenAiFileEndpoint, form);
            if (!uploadResponse.IsSuccessStatusCode)
            {
                var error = await uploadResponse.Content.ReadAsStringAsync();
                throw new Exception($"File upload failed: {error}");
            }

            using var uploadJson = await JsonDocument.ParseAsync(await uploadResponse.Content.ReadAsStreamAsync());
            var fileId = uploadJson.RootElement.GetProperty("id").GetString();

            // Step 2: Ask the model with a reference to the uploaded file.
            var requestBody = new
            {
                model = "gpt-4.1", // must support file_search
                messages = new[]
                {
                    new { role = "system", content = "You are an assistant that analyzes uploaded PDF files." },
                    new { role = "user", content = userPrompt }
                },
                tools = new[]
                {
                    new { type = "file_search" }
                },
                tool_resources = new
                {
                    file_search = new
                    {
                        // NOTE: the uploaded file id is passed here where vector store ids are expected;
                        // if the API rejects the request, the file may first need to be attached to a vector store.
                        vector_store_ids = new string[] { fileId! }
                    }
                }
            };

            var requestJson = JsonSerializer.Serialize(requestBody, new JsonSerializerOptions
            {
                PropertyNamingPolicy = JsonNamingPolicy.CamelCase
            });
            var requestContent = new StringContent(requestJson, Encoding.UTF8, "application/json");

            using var chatResponse = await _httpClient.PostAsync(OpenAiEndpoint, requestContent);
            chatResponse.EnsureSuccessStatusCode();

            using var responseJson = await JsonDocument.ParseAsync(await chatResponse.Content.ReadAsStreamAsync());
            var result = responseJson.RootElement
                .GetProperty("choices")[0]
                .GetProperty("message")
                .GetProperty("content")
                .GetString();

            return result ?? "No response from model";
        }
        #endregion
    }
}