
Information
Class: OpenAiIntegration.ServiceCollectionExtensions
Assembly: OpenAiIntegration
File(s): /home/runner/work/KicktippAi/KicktippAi/src/OpenAiIntegration/ServiceCollectionExtensions.cs
Line coverage
Covered lines: 0
Uncovered lines: 28
Coverable lines: 28
Total lines: 89
Line coverage: 0%

Branch coverage
Covered branches: 0
Total branches: 10
Branch coverage: 0%
Method coverage
Method coverage data is only available in the sponsor (PRO) version of the report.

Metrics

Method                   Branch coverage  Crap Score  Cyclomatic complexity  Line coverage
AddOpenAiPredictor(...)  0%               42          6                      0%
AddOpenAiPredictor(...)  0%               20          4                      0%
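
For reference, the Crap Score column is consistent with the standard C.R.A.P. metric, CRAP(m) = comp(m)^2 * (1 - cov(m))^3 + comp(m), where comp(m) is the method's cyclomatic complexity and cov(m) its line coverage as a fraction. At 0% coverage this reduces to comp^2 + comp, which matches the two rows above: 6^2 + 6 = 42 and 4^2 + 4 = 20.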

File(s)

/home/runner/work/KicktippAi/KicktippAi/src/OpenAiIntegration/ServiceCollectionExtensions.cs

Hits  Line  Source
         1  using EHonda.KicktippAi.Core;
         2  using Microsoft.Extensions.DependencyInjection;
         3  using Microsoft.Extensions.DependencyInjection.Extensions;
         4  using Microsoft.Extensions.Configuration;
         5  using Microsoft.Extensions.FileProviders;
         6  using Microsoft.Extensions.Logging;
         7  using OpenAI.Chat;
         8
         9  namespace OpenAiIntegration;
        10
        11  /// <summary>
        12  /// Extension methods for configuring OpenAI services in dependency injection
        13  /// </summary>
        14  public static class ServiceCollectionExtensions
        15  {
        16      /// <summary>
        17      /// Adds OpenAI predictor services to the service collection
        18      /// </summary>
        19      /// <param name="services">The service collection</param>
        20      /// <param name="apiKey">The OpenAI API key</param>
        21      /// <param name="model">The OpenAI model to use (defaults to gpt-4o-mini)</param>
        22      /// <returns>The service collection for chaining</returns>
        23      public static IServiceCollection AddOpenAiPredictor(
        24          this IServiceCollection services,
        25          string apiKey,
        26          string model = "gpt-4o-mini")
        27      {
   0    28          if (string.IsNullOrWhiteSpace(apiKey))
        29          {
   0    30              throw new ArgumentException("OpenAI API key cannot be null or empty", nameof(apiKey));
        31          }
        32
        33          // Register the ChatClient as a singleton
   0    34          services.TryAddSingleton<ChatClient>(serviceProvider =>
   0    35          {
   0    36              return new ChatClient(model, apiKey);
   0    37          });
        38
        39          // Register the predictor context
   0    40          services.TryAddScoped<PredictorContext>(_ => PredictorContext.CreateBasic());
        41
        42          // Register the predictor implementation
   0    43          services.TryAddScoped<IPredictor<PredictorContext>, OpenAiPredictor>();
        44
        45          // Register the cost calculation service
   0    46          services.TryAddScoped<ICostCalculationService, CostCalculationService>();
        47
        48          // Register the file provider for prompts
   0    49          services.TryAddSingleton(PromptsFileProvider.Create());
        50
        51          // Register the instructions template provider
   0    52          services.TryAddSingleton<IInstructionsTemplateProvider, InstructionsTemplateProvider>();
        53
        54          // Register the token usage tracker as singleton (to accumulate across requests)
   0    55          services.TryAddSingleton<ITokenUsageTracker>(serviceProvider =>
   0    56              new TokenUsageTracker(
   0    57                  serviceProvider.GetRequiredService<ILogger<TokenUsageTracker>>(),
   0    58                  serviceProvider.GetRequiredService<ICostCalculationService>()));
        59
        60          // Register the prediction service with model parameter
   0    61          services.TryAddScoped<IPredictionService>(serviceProvider =>
   0    62              new PredictionService(
   0    63                  serviceProvider.GetRequiredService<ChatClient>(),
   0    64                  serviceProvider.GetRequiredService<ILogger<PredictionService>>(),
   0    65                  serviceProvider.GetRequiredService<ICostCalculationService>(),
   0    66                  serviceProvider.GetRequiredService<ITokenUsageTracker>(),
   0    67                  serviceProvider.GetRequiredService<IInstructionsTemplateProvider>(),
   0    68                  model));
        69
   0    70          return services;
        71      }
        72
        73      /// <summary>
        74      /// Adds OpenAI predictor services to the service collection using configuration
        75      /// </summary>
        76      /// <param name="services">The service collection</param>
        77      /// <param name="configuration">The configuration containing OpenAI settings</param>
        78      /// <returns>The service collection for chaining</returns>
        79      public static IServiceCollection AddOpenAiPredictor(
        80          this IServiceCollection services,
        81          IConfiguration configuration)
        82      {
   0    83          var apiKey = configuration["OPENAI_API_KEY"] ??
   0    84                      Environment.GetEnvironmentVariable("OPENAI_API_KEY");
   0    85          var model = configuration["OPENAI_MODEL"] ?? "gpt-4o-mini";
        86
   0    87          return services.AddOpenAiPredictor(apiKey!, model);
        88      }
        89  }
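
Both overloads are currently uncovered. The sketch below is a minimal consumption example, not code from this repository: it assumes a bare ServiceCollection host, uses a placeholder API key, and assumes that IPredictionService and ITokenUsageTracker are resolvable from the OpenAiIntegration namespace as the listing suggests. A test along these lines, exercising both overloads plus the argument-validation branch, would cover most of the 28 coverable lines reported above.

using Microsoft.Extensions.DependencyInjection;
using OpenAiIntegration; // assumed namespace for IPredictionService / ITokenUsageTracker

var services = new ServiceCollection();

// TokenUsageTracker and PredictionService take ILogger<T>, so a logging provider is required.
services.AddLogging();

// Overload 1: explicit API key; the model parameter defaults to "gpt-4o-mini".
services.AddOpenAiPredictor(apiKey: "sk-placeholder", model: "gpt-4o-mini");

// Overload 2 (alternative): reads OPENAI_API_KEY and OPENAI_MODEL from configuration,
// falling back to the environment variable for the key (requires Microsoft.Extensions.Configuration):
// var configuration = new ConfigurationBuilder().AddEnvironmentVariables().Build();
// services.AddOpenAiPredictor(configuration);

using var provider = services.BuildServiceProvider();

// Resolving these services exercises the factory registrations shown in the listing.
var predictionService = provider.GetRequiredService<IPredictionService>();
var tokenUsageTracker = provider.GetRequiredService<ITokenUsageTracker>();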