Summary

Information
Class: OpenAiIntegration.ServiceCollectionExtensions
Assembly: OpenAiIntegration
File(s): /home/runner/work/KicktippAi/KicktippAi/src/OpenAiIntegration/ServiceCollectionExtensions.cs
Line coverage
0%
Covered lines: 0
Uncovered lines: 29
Coverable lines: 29
Total lines: 91
Line coverage: 0%
Branch coverage
0%
Covered branches: 0
Total branches: 10
Branch coverage: 0%
Method coverage

Feature is only available for sponsors

Upgrade to PRO version

Metrics

Method | Branch coverage | Crap Score | Cyclomatic complexity | Line coverage
AddOpenAiPredictor(...) | 0% | 42 | 6 | 0%
AddOpenAiPredictor(...) | 0% | 20 | 4 | 0%

File(s)

/home/runner/work/KicktippAi/KicktippAi/src/OpenAiIntegration/ServiceCollectionExtensions.cs

# | Line | Line coverage
 1using EHonda.KicktippAi.Core;
 2using Microsoft.Extensions.DependencyInjection;
 3using Microsoft.Extensions.DependencyInjection.Extensions;
 4using Microsoft.Extensions.Configuration;
 5using Microsoft.Extensions.FileProviders;
 6using Microsoft.Extensions.Logging;
 7using OpenAI.Chat;
 8
 9namespace OpenAiIntegration;
 10
/// <summary>
/// Extension methods for configuring OpenAI services in dependency injection.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds OpenAI predictor services to the service collection.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="apiKey">The OpenAI API key.</param>
    /// <param name="model">The OpenAI model to use (defaults to gpt-4o-mini).</param>
    /// <returns>The service collection for chaining.</returns>
    /// <exception cref="ArgumentException">Thrown when <paramref name="apiKey"/> is null, empty, or whitespace.</exception>
    public static IServiceCollection AddOpenAiPredictor(
        this IServiceCollection services,
        string apiKey,
        string model = "gpt-4o-mini")
    {
        if (string.IsNullOrWhiteSpace(apiKey))
        {
            throw new ArgumentException("OpenAI API key cannot be null or empty", nameof(apiKey));
        }

        // Register the ChatClient as a singleton; the model and key are captured
        // from the arguments, so the provider is not needed in the factory.
        services.TryAddSingleton<ChatClient>(_ => new ChatClient(model, apiKey));

        // Register the predictor context
        services.TryAddScoped<PredictorContext>(_ => PredictorContext.CreateBasic());

        // Register the predictor implementation
        services.TryAddScoped<IPredictor<PredictorContext>, OpenAiPredictor>();

        // Register the cost calculation service
        services.TryAddScoped<ICostCalculationService, CostCalculationService>();

        // Register the file provider for prompts
        services.TryAddSingleton(PromptsFileProvider.Create());

        // Register the instructions template provider
        services.TryAddSingleton<IInstructionsTemplateProvider, InstructionsTemplateProvider>();

        services.TryAddScoped<IMatchPromptReconstructionService, MatchPromptReconstructionService>();

        // Register the token usage tracker as singleton (to accumulate across requests)
        services.TryAddSingleton<ITokenUsageTracker>(serviceProvider =>
            new TokenUsageTracker(
                serviceProvider.GetRequiredService<ILogger<TokenUsageTracker>>(),
                serviceProvider.GetRequiredService<ICostCalculationService>()));

        // Register the prediction service with model parameter
        services.TryAddScoped<IPredictionService>(serviceProvider =>
            new PredictionService(
                serviceProvider.GetRequiredService<ChatClient>(),
                serviceProvider.GetRequiredService<ILogger<PredictionService>>(),
                serviceProvider.GetRequiredService<ICostCalculationService>(),
                serviceProvider.GetRequiredService<ITokenUsageTracker>(),
                serviceProvider.GetRequiredService<IInstructionsTemplateProvider>(),
                model));

        return services;
    }

    /// <summary>
    /// Adds OpenAI predictor services to the service collection using configuration.
    /// The API key is read from the "OPENAI_API_KEY" configuration value, falling back
    /// to the environment variable of the same name; the model is read from
    /// "OPENAI_MODEL" and defaults to gpt-4o-mini.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configuration">The configuration containing OpenAI settings.</param>
    /// <returns>The service collection for chaining.</returns>
    /// <exception cref="InvalidOperationException">
    /// Thrown when no API key is found in configuration or the environment.
    /// </exception>
    public static IServiceCollection AddOpenAiPredictor(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        var apiKey = configuration["OPENAI_API_KEY"] ??
                     Environment.GetEnvironmentVariable("OPENAI_API_KEY");

        // Fail fast with an actionable configuration error instead of suppressing
        // nullability with `apiKey!` and surfacing a misleading ArgumentException
        // from the other overload.
        if (string.IsNullOrWhiteSpace(apiKey))
        {
            throw new InvalidOperationException(
                "OpenAI API key not found. Set 'OPENAI_API_KEY' in configuration or as an environment variable.");
        }

        var model = configuration["OPENAI_MODEL"] ?? "gpt-4o-mini";

        return services.AddOpenAiPredictor(apiKey, model);
    }
}