Summary

Information
Class: OpenAiIntegration.PredictionService.JustificationContextSourceEntry
Assembly: OpenAiIntegration
File(s): /home/runner/work/KicktippAi/KicktippAi/src/OpenAiIntegration/PredictionService.cs
Line coverage
100%
Covered lines: 2
Uncovered lines: 0
Coverable lines: 2
Total lines: 685
Line coverage: 100%
Branch coverage
N/A
Covered branches: 0
Total branches: 0
Branch coverage: N/A
Method coverage

Feature is only available for sponsors

Upgrade to PRO version

Metrics

Method | Branch coverage | Crap Score | Cyclomatic complexity | Line coverage
get_DocumentName() | 100% | 1 | 1 | 100%
set_DocumentName(...) | 100% | 1 | 1 | 100%
.ctor() | 100% | 1 | 1 | 100%
get_Details() | 100% | 1 | 1 | 100%
set_Details(...) | 100% | 1 | 1 | 100%

File(s)

/home/runner/work/KicktippAi/KicktippAi/src/OpenAiIntegration/PredictionService.cs

#LineLine coverage
 1using System.Collections.Generic;
 2using System.Diagnostics;
 3using System.Linq;
 4using System.Text.Json;
 5using System.Text.Json.Serialization;
 6using EHonda.KicktippAi.Core;
 7using Microsoft.Extensions.Logging;
 8using OpenAI.Chat;
 9
 10namespace OpenAiIntegration;
 11
 12/// <summary>
 13/// Service for predicting match outcomes using OpenAI models
 14/// </summary>
 15public class PredictionService : IPredictionService
 16{
 17    private readonly ChatClient _chatClient;
 18    private readonly ILogger<PredictionService> _logger;
 19    private readonly ICostCalculationService _costCalculationService;
 20    private readonly ITokenUsageTracker _tokenUsageTracker;
 21    private readonly IInstructionsTemplateProvider _templateProvider;
 22    private readonly string _model;
 23    private readonly string _instructionsTemplate;
 24    private readonly string _instructionsTemplateWithJustification;
 25    private readonly string _bonusInstructionsTemplate;
 26    private readonly string _matchPromptPath;
 27    private readonly string _matchPromptPathWithJustification;
 28    private readonly string _bonusPromptPath;
 29
 30    public PredictionService(
 31        ChatClient chatClient,
 32        ILogger<PredictionService> logger,
 33        ICostCalculationService costCalculationService,
 34        ITokenUsageTracker tokenUsageTracker,
 35        IInstructionsTemplateProvider templateProvider,
 36        string model)
 37    {
 38        _chatClient = chatClient ?? throw new ArgumentNullException(nameof(chatClient));
 39        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
 40        _costCalculationService = costCalculationService ?? throw new ArgumentNullException(nameof(costCalculationService));
 41        _tokenUsageTracker = tokenUsageTracker ?? throw new ArgumentNullException(nameof(tokenUsageTracker));
 42        _templateProvider = templateProvider ?? throw new ArgumentNullException(nameof(templateProvider));
 43        _model = model ?? throw new ArgumentNullException(nameof(model));
 44
 45        var (matchTemplate, matchPath) = _templateProvider.LoadMatchTemplate(_model, includeJustification: false);
 46        var (matchJustificationTemplate, matchJustificationPath) = _templateProvider.LoadMatchTemplate(_model, includeJustification: true);
 47        var (bonusTemplate, bonusPath) = _templateProvider.LoadBonusTemplate(_model);
 48
 49        _instructionsTemplate = matchTemplate;
 50        _instructionsTemplateWithJustification = matchJustificationTemplate;
 51        _bonusInstructionsTemplate = bonusTemplate;
 52        _matchPromptPath = matchPath;
 53        _matchPromptPathWithJustification = matchJustificationPath;
 54        _bonusPromptPath = bonusPath;
 55    }
 56
 57    public async Task<Prediction?> PredictMatchAsync(
 58        Match match,
 59        IEnumerable<DocumentContext> contextDocuments,
 60        bool includeJustification = false,
 61        PredictionTelemetryMetadata? telemetryMetadata = null,
 62        CancellationToken cancellationToken = default)
 63    {
 64        _logger.LogInformation("Generating prediction for match: {HomeTeam} vs {AwayTeam} at {StartTime}",
 65            match.HomeTeam, match.AwayTeam, match.StartsAt);
 66
 67        try
 68        {
 69            // Build the instructions by combining template with context
 70            var instructions = BuildInstructions(contextDocuments, includeJustification);
 71
 72            // Create match JSON
 73            var matchJson = PredictionPromptComposer.CreateMatchJson(match);
 74
 75            _logger.LogDebug("Instructions length: {InstructionsLength} characters", instructions.Length);
 76            _logger.LogDebug("Context documents: {ContextCount}", contextDocuments.Count());
 77            _logger.LogDebug("Match JSON: {MatchJson}", matchJson);
 78
 79            // Create messages for the chat completion
 80            var messages = new List<ChatMessage>
 81            {
 82                new SystemChatMessage(instructions),
 83                new UserChatMessage(matchJson)
 84            };
 85
 86            _logger.LogDebug("Calling OpenAI API for prediction");
 87
 88            // Start an OTel activity for Langfuse generation tracking
 89            using var activity = Telemetry.Source.StartActivity("predict-match");
 90
 91            // Call OpenAI with structured output format
 92            var response = await _chatClient.CompleteChatAsync(
 93                messages,
 94                new ChatCompletionOptions
 95                {
 96                    MaxOutputTokenCount = 10_000, // Safeguard against high costs
 97                    ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat(
 98                        jsonSchemaFormatName: "match_prediction",
 99                        jsonSchema: BinaryData.FromBytes(BuildPredictionJsonSchema(includeJustification)),
 100                        jsonSchemaIsStrict: true)
 101                },
 102                cancellationToken);
 103
 104            // Parse the structured response
 105            var predictionJson = response.Value.Content[0].Text;
 106            _logger.LogDebug("Received prediction JSON: {PredictionJson}", predictionJson);
 107
 108            var prediction = ParsePrediction(predictionJson);
 109
 110            _logger.LogInformation("Prediction generated: {HomeGoals}-{AwayGoals} for {HomeTeam} vs {AwayTeam}",
 111                prediction.HomeGoals, prediction.AwayGoals, match.HomeTeam, match.AwayTeam);
 112
 113            // Log token usage and cost breakdown
 114            var usage = response.Value.Usage;
 115            _logger.LogDebug("Token usage - Input: {InputTokens}, Output: {OutputTokens}, Total: {TotalTokens}",
 116                usage.InputTokenCount, usage.OutputTokenCount, usage.TotalTokenCount);
 117
 118            // Set Langfuse generation attributes on the activity
 119            SetLangfuseGenerationAttributes(activity, messages, predictionJson, usage, telemetryMetadata);
 120
 121            // Add usage to tracker
 122            _tokenUsageTracker.AddUsage(_model, usage);
 123
 124            // Calculate and log costs
 125            _costCalculationService.LogCostBreakdown(_model, usage);
 126
 127            return prediction;
 128        }
 129        catch (Exception ex)
 130        {
 131            _logger.LogError(ex, "Error generating prediction for match: {HomeTeam} vs {AwayTeam}",
 132                match.HomeTeam, match.AwayTeam);
 133            Console.Error.WriteLine($"Prediction error for {match.HomeTeam} vs {match.AwayTeam}: {ex.Message}");
 134
 135            return null;
 136        }
 137    }
 138
 139    public async Task<BonusPrediction?> PredictBonusQuestionAsync(
 140        BonusQuestion bonusQuestion,
 141        IEnumerable<DocumentContext> contextDocuments,
 142        PredictionTelemetryMetadata? telemetryMetadata = null,
 143        CancellationToken cancellationToken = default)
 144    {
 145        _logger.LogInformation("Generating prediction for bonus question: {QuestionText}", bonusQuestion.Text);
 146
 147        try
 148        {
 149            // Build the instructions by combining template with context
 150            var instructions = BuildBonusInstructions(contextDocuments);
 151
 152            // Create bonus question JSON
 153            var questionJson = PredictionPromptComposer.CreateBonusQuestionJson(bonusQuestion);
 154
 155            _logger.LogDebug("Instructions length: {InstructionsLength} characters", instructions.Length);
 156            _logger.LogDebug("Context documents: {ContextCount}", contextDocuments.Count());
 157            _logger.LogDebug("Question JSON: {QuestionJson}", questionJson);
 158
 159            // Create messages for the chat completion
 160            var messages = new List<ChatMessage>
 161            {
 162                new SystemChatMessage(instructions),
 163                new UserChatMessage(questionJson)
 164            };
 165
 166            _logger.LogDebug("Calling OpenAI API for bonus prediction");
 167
 168            // Create JSON schema based on the question
 169            var jsonSchema = CreateSingleBonusPredictionJsonSchema(bonusQuestion);
 170
 171            // Start an OTel activity for Langfuse generation tracking
 172            using var activity = Telemetry.Source.StartActivity("predict-bonus");
 173
 174            // Call OpenAI with structured output format
 175            var response = await _chatClient.CompleteChatAsync(
 176                messages,
 177                new ChatCompletionOptions
 178                {
 179                    MaxOutputTokenCount = 10_000, // Standard limit for single question
 180                    ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat(
 181                        jsonSchemaFormatName: "bonus_prediction",
 182                        jsonSchema: BinaryData.FromBytes(jsonSchema),
 183                        jsonSchemaIsStrict: true)
 184                },
 185                cancellationToken);
 186
 187            // Parse the structured response
 188            var predictionJson = response.Value.Content[0].Text;
 189            _logger.LogDebug("Received bonus prediction JSON: {PredictionJson}", predictionJson);
 190
 191            var prediction = ParseSingleBonusPrediction(predictionJson, bonusQuestion);
 192
 193            if (prediction != null)
 194            {
 195                _logger.LogInformation("Generated prediction for bonus question: {SelectedOptions}",
 196                    string.Join(", ", prediction.SelectedOptionIds));
 197            }
 198
 199            // Log token usage and cost breakdown
 200            var usage = response.Value.Usage;
 201            _logger.LogDebug("Token usage - Input: {InputTokens}, Output: {OutputTokens}, Total: {TotalTokens}",
 202                usage.InputTokenCount, usage.OutputTokenCount, usage.TotalTokenCount);
 203
 204            // Set Langfuse generation attributes on the activity
 205            SetLangfuseGenerationAttributes(activity, messages, predictionJson, usage, telemetryMetadata);
 206
 207            // Add usage to tracker
 208            _tokenUsageTracker.AddUsage(_model, usage);
 209
 210            // Calculate and log costs
 211            _costCalculationService.LogCostBreakdown(_model, usage);
 212
 213            return prediction;
 214        }
 215        catch (Exception ex)
 216        {
 217            _logger.LogError(ex, "Error generating bonus prediction for question: {QuestionText}", bonusQuestion.Text);
 218            return null;
 219        }
 220    }
 221
 222    private string BuildInstructions(IEnumerable<DocumentContext> contextDocuments, bool includeJustification)
 223    {
 224        var template = includeJustification
 225            ? _instructionsTemplateWithJustification
 226            : _instructionsTemplate;
 227
 228        var contextList = contextDocuments.ToList();
 229        if (contextList.Any())
 230        {
 231            _logger.LogDebug("Added {ContextCount} context documents to instructions", contextList.Count);
 232        }
 233        else
 234        {
 235            _logger.LogDebug("No context documents provided");
 236        }
 237
 238        return PredictionPromptComposer.BuildSystemPrompt(template, contextList);
 239    }
 240
 241    private static byte[] BuildPredictionJsonSchema(bool includeJustification)
 242    {
 243        var properties = new Dictionary<string, object?>
 244        {
 245            ["home"] = new Dictionary<string, object?>
 246            {
 247                ["type"] = "integer",
 248                ["description"] = "Predicted goals for the home team"
 249            },
 250            ["away"] = new Dictionary<string, object?>
 251            {
 252                ["type"] = "integer",
 253                ["description"] = "Predicted goals for the away team"
 254            }
 255        };
 256
 257        var required = new List<string> { "home", "away" };
 258
 259        if (includeJustification)
 260        {
 261            var mostValuableContextSourceItem = new Dictionary<string, object?>
 262            {
 263                ["type"] = "object",
 264                ["properties"] = new Dictionary<string, object?>
 265                {
 266                    ["documentName"] = new Dictionary<string, object?>
 267                    {
 268                        ["type"] = "string",
 269                        ["description"] = "Name of the context document referenced"
 270                    },
 271                    ["details"] = new Dictionary<string, object?>
 272                    {
 273                        ["type"] = "string",
 274                        ["description"] = "Brief summary of why the document or parts of it were useful"
 275                    }
 276                },
 277                ["required"] = new[] { "documentName", "details" },
 278                ["additionalProperties"] = false
 279            };
 280
 281            var leastValuableContextSourceItem = new Dictionary<string, object?>
 282            {
 283                ["type"] = "object",
 284                ["properties"] = new Dictionary<string, object?>
 285                {
 286                    ["documentName"] = new Dictionary<string, object?>
 287                    {
 288                        ["type"] = "string",
 289                        ["description"] = "Name of the context document referenced"
 290                    },
 291                    ["details"] = new Dictionary<string, object?>
 292                    {
 293                        ["type"] = "string",
 294                        ["description"] = "Brief summary explaining why the document or parts of it offered limited insight"
 295                    }
 296                },
 297                ["required"] = new[] { "documentName", "details" },
 298                ["additionalProperties"] = false
 299            };
 300
 301            var contextSources = new Dictionary<string, object?>
 302            {
 303                ["type"] = "object",
 304                ["properties"] = new Dictionary<string, object?>
 305                {
 306                    ["mostValuable"] = new Dictionary<string, object?>
 307                    {
 308                        ["type"] = "array",
 309                        ["items"] = mostValuableContextSourceItem,
 310                        ["description"] = "Context documents that most influenced the prediction",
 311                        ["minItems"] = 0
 312                    },
 313                    ["leastValuable"] = new Dictionary<string, object?>
 314                    {
 315                        ["type"] = "array",
 316                        ["items"] = leastValuableContextSourceItem,
 317                        ["description"] = "Context documents that provided limited or no valuable insight",
 318                        ["minItems"] = 0
 319                    }
 320                },
 321                ["required"] = new[] { "leastValuable", "mostValuable" },
 322                ["additionalProperties"] = false
 323            };
 324
 325            properties["justification"] = new Dictionary<string, object?>
 326            {
 327                ["type"] = "object",
 328                ["properties"] = new Dictionary<string, object?>
 329                {
 330                    ["keyReasoning"] = new Dictionary<string, object?>
 331                    {
 332                        ["type"] = "string",
 333                        ["description"] = "Concise analytic summary motivating the predicted scoreline"
 334                    },
 335                    ["contextSources"] = contextSources,
 336                    ["uncertainties"] = new Dictionary<string, object?>
 337                    {
 338                        ["type"] = "array",
 339                        ["items"] = new Dictionary<string, object?>
 340                        {
 341                            ["type"] = "string",
 342                            ["description"] = "Single uncertainty or external factor affecting confidence"
 343                        },
 344                        ["description"] = "Factors that could alter the predicted outcome",
 345                        ["minItems"] = 0
 346                    }
 347                },
 348                ["required"] = new[] { "contextSources", "keyReasoning", "uncertainties" },
 349                ["additionalProperties"] = false
 350            };
 351            required.Add("justification");
 352        }
 353
 354        var schema = new Dictionary<string, object?>
 355        {
 356            ["type"] = "object",
 357            ["properties"] = properties,
 358            ["required"] = required,
 359            ["additionalProperties"] = false
 360        };
 361
 362        return JsonSerializer.SerializeToUtf8Bytes(schema);
 363    }
 364
 365    private Prediction ParsePrediction(string predictionJson)
 366    {
 367        try
 368        {
 369            _logger.LogDebug("Parsing prediction JSON: {PredictionJson}", predictionJson);
 370
 371            var predictionResponse = JsonSerializer.Deserialize<PredictionResponse>(predictionJson);
 372            if (predictionResponse == null)
 373            {
 374                LogRawModelResponse(predictionJson);
 375                throw new InvalidOperationException("Failed to deserialize prediction response");
 376            }
 377
 378            _logger.LogDebug("Parsed prediction response - Home: {Home}, Away: {Away}", predictionResponse.Home, predictionResponse.Away);
 379
 380            PredictionJustification? justification = null;
 381
 382            if (predictionResponse.Justification != null)
 383            {
 384                var justificationResponse = predictionResponse.Justification;
 385
 386                var mostValuable = justificationResponse.ContextSources?.MostValuable?
 387                    .Where(entry => entry != null)
 388                    .Select(entry => new PredictionJustificationContextSource(
 389                        entry!.DocumentName?.Trim() ?? string.Empty,
 390                        entry.Details?.Trim() ?? string.Empty))
 391                    .ToList() ?? new List<PredictionJustificationContextSource>();
 392
 393                var leastValuable = justificationResponse.ContextSources?.LeastValuable?
 394                    .Where(entry => entry != null)
 395                    .Select(entry => new PredictionJustificationContextSource(
 396                        entry!.DocumentName?.Trim() ?? string.Empty,
 397                        entry.Details?.Trim() ?? string.Empty))
 398                    .ToList() ?? new List<PredictionJustificationContextSource>();
 399
 400                var uncertainties = justificationResponse.Uncertainties?
 401                    .Where(item => !string.IsNullOrWhiteSpace(item))
 402                    .Select(item => item.Trim())
 403                    .ToList() ?? new List<string>();
 404
 405                justification = new PredictionJustification(
 406                    justificationResponse.KeyReasoning?.Trim() ?? string.Empty,
 407                    new PredictionJustificationContextSources(mostValuable, leastValuable),
 408                    uncertainties);
 409
 410                _logger.LogDebug(
 411                    "Parsed justification with key reasoning: {KeyReasoning}; Most valuable sources: {MostValuableCount}; Least valuable sources: {LeastValuableCount}; Uncertainties: {UncertaintiesCount}",
 412                    justification.KeyReasoning,
 413                    justification.ContextSources.MostValuable.Count,
 414                    justification.ContextSources.LeastValuable.Count,
 415                    justification.Uncertainties.Count);
 416            }
 417
 418            return new Prediction(predictionResponse.Home, predictionResponse.Away, justification);
 419        }
 420        catch (JsonException ex)
 421        {
 422            _logger.LogError(ex, "Failed to parse prediction JSON: {PredictionJson}", predictionJson);
 423            LogRawModelResponse(predictionJson);
 424            throw new InvalidOperationException($"Failed to parse prediction response: {ex.Message}", ex);
 425        }
 426    }
 427
 428    private void LogRawModelResponse(string rawResponse)
 429    {
 430        if (string.IsNullOrWhiteSpace(rawResponse))
 431        {
 432            const string message = "Raw model response from OpenAI was empty or whitespace.";
 433            _logger.LogError(message);
 434            Console.Error.WriteLine(message);
 435            return;
 436        }
 437
 438        _logger.LogError("Raw model response from OpenAI: {RawResponse}", rawResponse);
 439        Console.Error.WriteLine("Raw model response from OpenAI:");
 440        Console.Error.WriteLine(rawResponse);
 441    }
 442
 443    private string BuildBonusInstructions(IEnumerable<DocumentContext> contextDocuments)
 444    {
 445        // Use the pre-loaded bonus instructions template
 446        var bonusInstructionsTemplate = _bonusInstructionsTemplate;
 447
 448        var contextList = contextDocuments.ToList();
 449        if (contextList.Any())
 450        {
 451            _logger.LogDebug("Added {ContextCount} context documents to bonus instructions", contextList.Count);
 452        }
 453        else
 454        {
 455            _logger.LogDebug("No context documents provided for bonus predictions");
 456        }
 457
 458        return PredictionPromptComposer.BuildSystemPrompt(bonusInstructionsTemplate, contextList);
 459    }
 460
 461    private static byte[] CreateSingleBonusPredictionJsonSchema(BonusQuestion question)
 462    {
 463        // For multi-selection questions, require exactly MaxSelections answers
 464        // For single-selection questions, require exactly 1 answer
 465        var requiredSelections = question.MaxSelections;
 466
 467        var schema = new
 468        {
 469            type = "object",
 470            properties = new
 471            {
 472                selectedOptionIds = new
 473                {
 474                    type = "array",
 475                    items = new { type = "string", @enum = question.Options.Select(o => o.Id).ToArray() },
 476                    minItems = requiredSelections,
 477                    maxItems = requiredSelections
 478                }
 479            },
 480            required = new[] { "selectedOptionIds" },
 481            additionalProperties = false
 482        };
 483
 484        return JsonSerializer.SerializeToUtf8Bytes(schema);
 485    }
 486
 487    private BonusPrediction? ParseSingleBonusPrediction(string predictionJson, BonusQuestion question)
 488    {
 489        try
 490        {
 491            _logger.LogDebug("Parsing single bonus prediction JSON: {PredictionJson}", predictionJson);
 492
 493            var response = JsonSerializer.Deserialize<SingleBonusPredictionResponse>(predictionJson);
 494            if (response?.SelectedOptionIds?.Any() != true)
 495            {
 496                throw new InvalidOperationException("Failed to deserialize bonus prediction response or no options selected");
 497            }
 498
 499            // Validate that all selected options exist for this question
 500            var validOptionIds = question.Options.Select(o => o.Id).ToHashSet();
 501            var invalidOptions = response.SelectedOptionIds.Where(id => !validOptionIds.Contains(id)).ToArray();
 502
 503            if (invalidOptions.Any())
 504            {
 505                _logger.LogWarning("Invalid option IDs for question '{QuestionText}': {InvalidOptions}",
 506                    question.Text, string.Join(", ", invalidOptions));
 507                return null;
 508            }
 509
 510            // Validate no duplicate selections
 511            var duplicateOptions = response.SelectedOptionIds
 512                .GroupBy(id => id)
 513                .Where(g => g.Count() > 1)
 514                .Select(g => g.Key)
 515                .ToArray();
 516
 517            if (duplicateOptions.Any())
 518            {
 519                _logger.LogWarning("Duplicate option IDs for question '{QuestionText}': {DuplicateOptions}",
 520                    question.Text, string.Join(", ", duplicateOptions));
 521                return null;
 522            }
 523
 524            // Validate selection count - must match exactly MaxSelections for full predictions
 525            if (response.SelectedOptionIds.Length != question.MaxSelections)
 526            {
 527                _logger.LogWarning("Invalid selection count for question '{QuestionText}': expected exactly {MaxSelections}, got {ActualCount}",
 528                    question.Text, question.MaxSelections, response.SelectedOptionIds.Length);
 529                return null;
 530            }
 531
 532            var prediction = new BonusPrediction(response.SelectedOptionIds.ToList());
 533
 534            _logger.LogDebug("Parsed prediction: {SelectedOptions}",
 535                string.Join(", ", response.SelectedOptionIds));
 536
 537            return prediction;
 538        }
 539        catch (JsonException ex)
 540        {
 541            _logger.LogError(ex, "Failed to parse bonus prediction JSON: {PredictionJson}", predictionJson);
 542            return null;
 543        }
 544    }
 545
 546    /// <summary>
 547    /// Gets the file path of the match prediction prompt being used by this service
 548    /// </summary>
 549    /// <returns>The absolute file path to the match prompt file</returns>
 550    public string GetMatchPromptPath(bool includeJustification = false) => includeJustification ? _matchPromptPathWithJustification : _matchPromptPath;
 551
 552    /// <summary>
 553    /// Gets the file path of the bonus question prediction prompt being used by this service
 554    /// </summary>
 555    /// <returns>The absolute file path to the bonus prompt file</returns>
 556    public string GetBonusPromptPath() => _bonusPromptPath;
 557
 558    /// <summary>
 559    /// Internal class for deserializing the structured prediction response
 560    /// </summary>
 561    private class PredictionResponse
 562    {
 563        [JsonPropertyName("home")]
 564        public int Home { get; set; }
 565
 566        [JsonPropertyName("away")]
 567        public int Away { get; set; }
 568
 569        [JsonPropertyName("justification")]
 570        public JustificationResponse? Justification { get; set; }
 571    }
 572
 573    private class JustificationResponse
 574    {
 575        [JsonPropertyName("keyReasoning")]
 576        public string KeyReasoning { get; set; } = string.Empty;
 577
 578        [JsonPropertyName("contextSources")]
 579        public JustificationContextSourcesResponse ContextSources { get; set; } = new();
 580
 581        [JsonPropertyName("uncertainties")]
 582        public string[] Uncertainties { get; set; } = Array.Empty<string>();
 583    }
 584
 585    private class JustificationContextSourcesResponse
 586    {
 587        [JsonPropertyName("mostValuable")]
 588        public JustificationContextSourceEntry[] MostValuable { get; set; } = Array.Empty<JustificationContextSourceEntry>();
 589
 590        [JsonPropertyName("leastValuable")]
 591        public JustificationContextSourceEntry[] LeastValuable { get; set; } = Array.Empty<JustificationContextSourceEntry>();
 592    }
 593
 594    private class JustificationContextSourceEntry
 595    {
 596        [JsonPropertyName("documentName")]
 1597        public string DocumentName { get; set; } = string.Empty;
 598
 599        [JsonPropertyName("details")]
 1600        public string Details { get; set; } = string.Empty;
 601    }
 602
 603    /// <summary>
 604    /// Internal class for deserializing the bonus predictions response
 605    /// </summary>
 606    private class BonusPredictionsResponse
 607    {
 608        [JsonPropertyName("predictions")]
 609        public BonusPredictionEntry[]? Predictions { get; set; }
 610    }
 611
 612    /// <summary>
 613    /// Internal class for deserializing individual bonus prediction entries
 614    /// </summary>
 615    private class BonusPredictionEntry
 616    {
 617        [JsonPropertyName("questionId")]
 618        public string QuestionId { get; set; } = string.Empty;
 619
 620        [JsonPropertyName("selectedOptionIds")]
 621        public string[] SelectedOptionIds { get; set; } = Array.Empty<string>();
 622    }
 623
 624    /// <summary>
 625    /// Internal class for deserializing single bonus prediction response
 626    /// </summary>
 627    private class SingleBonusPredictionResponse
 628    {
 629        [JsonPropertyName("selectedOptionIds")]
 630        public string[] SelectedOptionIds { get; set; } = Array.Empty<string>();
 631    }
 632
 633    /// <summary>
 634    /// Sets Langfuse-mapped OpenTelemetry attributes on the given activity.
 635    /// If <paramref name="activity"/> is <c>null</c> (no OTel listener registered), this is a no-op.
 636    /// </summary>
 637    private void SetLangfuseGenerationAttributes(
 638        Activity? activity,
 639        List<ChatMessage> messages,
 640        string responseJson,
 641        ChatTokenUsage usage,
 642        PredictionTelemetryMetadata? telemetryMetadata)
 643    {
 644        if (activity is null)
 645            return;
 646
 647        activity.SetTag("langfuse.observation.type", "generation");
 648        activity.SetTag("gen_ai.request.model", _model);
 649
 650        // Serialize messages as input (system prompt + user message)
 651        var inputMessages = messages.Select(m => new
 652        {
 653            role = m switch
 654            {
 655                SystemChatMessage => "system",
 656                UserChatMessage => "user",
 657                _ => "unknown"
 658            },
 659            content = m switch
 660            {
 661                SystemChatMessage s => s.Content[0].Text,
 662                UserChatMessage u => u.Content[0].Text,
 663                _ => string.Empty
 664            }
 665        });
 666        activity.SetTag("langfuse.observation.input", JsonSerializer.Serialize(inputMessages));
 667        activity.SetTag("langfuse.observation.output", responseJson);
 668        telemetryMetadata?.ApplyToObservation(activity);
 669
 670        // Token usage details
 671        var usageDetails = new
 672        {
 673            input = usage.InputTokenCount,
 674            output = usage.OutputTokenCount,
 675            cache_read_input_tokens = usage.InputTokenDetails?.CachedTokenCount ?? 0,
 676            reasoning_tokens = usage.OutputTokenDetails?.ReasoningTokenCount ?? 0
 677        };
 678        activity.SetTag("langfuse.observation.usage_details", JsonSerializer.Serialize(usageDetails));
 679
 680        // Cost details are intentionally omitted here: Langfuse automatically infers costs from the
 681        // model name and usage_details using its maintained pricing tables, which are more up-to-date
 682        // than the prices kept in this repository. Explicitly ingesting cost_details would override
 683        // that inference (ingested values take priority over inferred ones).
 684    }
 685}