using EHonda.KicktippAi.Core;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.FileProviders;
using Microsoft.Extensions.Logging;
using OpenAI.Chat;

namespace OpenAiIntegration;

/// <summary>
/// Extension methods for configuring OpenAI services in dependency injection
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds OpenAI predictor services to the service collection
    /// </summary>
    /// <param name="services">The service collection</param>
    /// <param name="apiKey">The OpenAI API key</param>
    /// <param name="model">The OpenAI model to use (defaults to gpt-4o-mini)</param>
    /// <returns>The service collection for chaining</returns>
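    /// <example>
    /// A minimal usage sketch (illustrative only; assumes the caller supplies the key and registers
    /// logging separately, since the registered services depend on <see cref="ILogger{TCategoryName}"/>):
    /// <code>
    /// var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY")!;
    ///
    /// var services = new ServiceCollection();
    /// services.AddLogging();
    /// services.AddOpenAiPredictor(apiKey, model: "gpt-4o-mini");
    ///
    /// using var provider = services.BuildServiceProvider();
    /// using var scope = provider.CreateScope();
    /// var predictionService = scope.ServiceProvider.GetRequiredService&lt;IPredictionService&gt;();
    /// </code>
    /// </example>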
    public static IServiceCollection AddOpenAiPredictor(
        this IServiceCollection services,
        string apiKey,
        string model = "gpt-4o-mini")
    {
        if (string.IsNullOrWhiteSpace(apiKey))
        {
            throw new ArgumentException("OpenAI API key cannot be null or empty", nameof(apiKey));
        }

        // Register the ChatClient as a singleton
        services.TryAddSingleton<ChatClient>(serviceProvider =>
        {
            return new ChatClient(model, apiKey);
        });

        // Register the predictor context
        services.TryAddScoped<PredictorContext>(_ => PredictorContext.CreateBasic());

        // Register the predictor implementation
        services.TryAddScoped<IPredictor<PredictorContext>, OpenAiPredictor>();

        // Register the cost calculation service
        services.TryAddScoped<ICostCalculationService, CostCalculationService>();

        // Register the file provider for prompts
        services.TryAddSingleton(PromptsFileProvider.Create());

        // Register the instructions template provider
        services.TryAddSingleton<IInstructionsTemplateProvider, InstructionsTemplateProvider>();

        // Register the token usage tracker as singleton (to accumulate across requests)
        services.TryAddSingleton<ITokenUsageTracker>(serviceProvider =>
            new TokenUsageTracker(
                serviceProvider.GetRequiredService<ILogger<TokenUsageTracker>>(),
                serviceProvider.GetRequiredService<ICostCalculationService>()));

        // Register the prediction service with model parameter
        services.TryAddScoped<IPredictionService>(serviceProvider =>
            new PredictionService(
                serviceProvider.GetRequiredService<ChatClient>(),
                serviceProvider.GetRequiredService<ILogger<PredictionService>>(),
                serviceProvider.GetRequiredService<ICostCalculationService>(),
                serviceProvider.GetRequiredService<ITokenUsageTracker>(),
                serviceProvider.GetRequiredService<IInstructionsTemplateProvider>(),
                model));

        return services;
    }

    /// <summary>
    /// Adds OpenAI predictor services to the service collection using configuration
    /// </summary>
    /// <param name="services">The service collection</param>
    /// <param name="configuration">The configuration; the API key is read from <c>OPENAI_API_KEY</c> (falling back to the environment variable of the same name) and the model from <c>OPENAI_MODEL</c>, defaulting to gpt-4o-mini</param>
    /// <returns>The service collection for chaining</returns>
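    /// <example>
    /// A minimal usage sketch (illustrative only; assumes the key is available as an environment
    /// variable and that the shown <c>ConfigurationBuilder</c> setup is one of several options):
    /// <code>
    /// var configuration = new ConfigurationBuilder()
    ///     .AddEnvironmentVariables()
    ///     .Build();
    ///
    /// var services = new ServiceCollection();
    /// services.AddLogging();
    /// services.AddOpenAiPredictor(configuration);
    /// </code>
    /// </example>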
    public static IServiceCollection AddOpenAiPredictor(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        var apiKey = configuration["OPENAI_API_KEY"] ??
                     Environment.GetEnvironmentVariable("OPENAI_API_KEY");
        var model = configuration["OPENAI_MODEL"] ?? "gpt-4o-mini";

        if (string.IsNullOrWhiteSpace(apiKey))
        {
            throw new InvalidOperationException(
                "OpenAI API key not found. Set OPENAI_API_KEY in configuration or as an environment variable.");
        }

        return services.AddOpenAiPredictor(apiKey, model);
    }
}