Summary

Information
Class: OpenAiIntegration.PredictionService
Assembly: OpenAiIntegration
File(s): /home/runner/work/KicktippAi/KicktippAi/src/OpenAiIntegration/PredictionService.cs
Line coverage
89%
Covered lines: 337
Uncovered lines: 41
Coverable lines: 378
Total lines: 685
Line coverage: 89.1%
Branch coverage
68%
Covered branches: 75
Total branches: 110
Branch coverage: 68.1%
Method coverage

Feature is only available for sponsors

Upgrade to PRO version

Metrics

File(s)

/home/runner/work/KicktippAi/KicktippAi/src/OpenAiIntegration/PredictionService.cs

# | Line | Line coverage
 1using System.Collections.Generic;
 2using System.Diagnostics;
 3using System.Linq;
 4using System.Text.Json;
 5using System.Text.Json.Serialization;
 6using EHonda.KicktippAi.Core;
 7using Microsoft.Extensions.Logging;
 8using OpenAI.Chat;
 9
 10namespace OpenAiIntegration;
 11
 12/// <summary>
 13/// Service for predicting match outcomes using OpenAI models
 14/// </summary>
 15public class PredictionService : IPredictionService
 16{
 17    private readonly ChatClient _chatClient;
 18    private readonly ILogger<PredictionService> _logger;
 19    private readonly ICostCalculationService _costCalculationService;
 20    private readonly ITokenUsageTracker _tokenUsageTracker;
 21    private readonly IInstructionsTemplateProvider _templateProvider;
 22    private readonly string _model;
 23    private readonly string _instructionsTemplate;
 24    private readonly string _instructionsTemplateWithJustification;
 25    private readonly string _bonusInstructionsTemplate;
 26    private readonly string _matchPromptPath;
 27    private readonly string _matchPromptPathWithJustification;
 28    private readonly string _bonusPromptPath;
 29
 130    public PredictionService(
 131        ChatClient chatClient,
 132        ILogger<PredictionService> logger,
 133        ICostCalculationService costCalculationService,
 134        ITokenUsageTracker tokenUsageTracker,
 135        IInstructionsTemplateProvider templateProvider,
 136        string model)
 37    {
 138        _chatClient = chatClient ?? throw new ArgumentNullException(nameof(chatClient));
 139        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
 140        _costCalculationService = costCalculationService ?? throw new ArgumentNullException(nameof(costCalculationService));
 141        _tokenUsageTracker = tokenUsageTracker ?? throw new ArgumentNullException(nameof(tokenUsageTracker));
 142        _templateProvider = templateProvider ?? throw new ArgumentNullException(nameof(templateProvider));
 143        _model = model ?? throw new ArgumentNullException(nameof(model));
 44
 145        var (matchTemplate, matchPath) = _templateProvider.LoadMatchTemplate(_model, includeJustification: false);
 146        var (matchJustificationTemplate, matchJustificationPath) = _templateProvider.LoadMatchTemplate(_model, includeJustification: true);
 147        var (bonusTemplate, bonusPath) = _templateProvider.LoadBonusTemplate(_model);
 48
 149        _instructionsTemplate = matchTemplate;
 150        _instructionsTemplateWithJustification = matchJustificationTemplate;
 151        _bonusInstructionsTemplate = bonusTemplate;
 152        _matchPromptPath = matchPath;
 153        _matchPromptPathWithJustification = matchJustificationPath;
 154        _bonusPromptPath = bonusPath;
 155    }
 56
 57    public async Task<Prediction?> PredictMatchAsync(
 58        Match match,
 59        IEnumerable<DocumentContext> contextDocuments,
 60        bool includeJustification = false,
 61        PredictionTelemetryMetadata? telemetryMetadata = null,
 62        CancellationToken cancellationToken = default)
 63    {
 164        _logger.LogInformation("Generating prediction for match: {HomeTeam} vs {AwayTeam} at {StartTime}",
 165            match.HomeTeam, match.AwayTeam, match.StartsAt);
 66
 67        try
 68        {
 69            // Build the instructions by combining template with context
 170            var instructions = BuildInstructions(contextDocuments, includeJustification);
 71
 72            // Create match JSON
 173            var matchJson = PredictionPromptComposer.CreateMatchJson(match);
 74
 175            _logger.LogDebug("Instructions length: {InstructionsLength} characters", instructions.Length);
 176            _logger.LogDebug("Context documents: {ContextCount}", contextDocuments.Count());
 177            _logger.LogDebug("Match JSON: {MatchJson}", matchJson);
 78
 79            // Create messages for the chat completion
 180            var messages = new List<ChatMessage>
 181            {
 182                new SystemChatMessage(instructions),
 183                new UserChatMessage(matchJson)
 184            };
 85
 186            _logger.LogDebug("Calling OpenAI API for prediction");
 87
 88            // Start an OTel activity for Langfuse generation tracking
 189            using var activity = Telemetry.Source.StartActivity("predict-match");
 90
 91            // Call OpenAI with structured output format
 192            var response = await _chatClient.CompleteChatAsync(
 193                messages,
 194                new ChatCompletionOptions
 195                {
 196                    MaxOutputTokenCount = 10_000, // Safeguard against high costs
 197                    ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat(
 198                        jsonSchemaFormatName: "match_prediction",
 199                        jsonSchema: BinaryData.FromBytes(BuildPredictionJsonSchema(includeJustification)),
 1100                        jsonSchemaIsStrict: true)
 1101                },
 1102                cancellationToken);
 103
 104            // Parse the structured response
 1105            var predictionJson = response.Value.Content[0].Text;
 1106            _logger.LogDebug("Received prediction JSON: {PredictionJson}", predictionJson);
 107
 1108            var prediction = ParsePrediction(predictionJson);
 109
 1110            _logger.LogInformation("Prediction generated: {HomeGoals}-{AwayGoals} for {HomeTeam} vs {AwayTeam}",
 1111                prediction.HomeGoals, prediction.AwayGoals, match.HomeTeam, match.AwayTeam);
 112
 113            // Log token usage and cost breakdown
 1114            var usage = response.Value.Usage;
 1115            _logger.LogDebug("Token usage - Input: {InputTokens}, Output: {OutputTokens}, Total: {TotalTokens}",
 1116                usage.InputTokenCount, usage.OutputTokenCount, usage.TotalTokenCount);
 117
 118            // Set Langfuse generation attributes on the activity
 1119            SetLangfuseGenerationAttributes(activity, messages, predictionJson, usage, telemetryMetadata);
 120
 121            // Add usage to tracker
 1122            _tokenUsageTracker.AddUsage(_model, usage);
 123
 124            // Calculate and log costs
 1125            _costCalculationService.LogCostBreakdown(_model, usage);
 126
 1127            return prediction;
 128        }
 1129        catch (Exception ex)
 130        {
 1131            _logger.LogError(ex, "Error generating prediction for match: {HomeTeam} vs {AwayTeam}",
 1132                match.HomeTeam, match.AwayTeam);
 1133            Console.Error.WriteLine($"Prediction error for {match.HomeTeam} vs {match.AwayTeam}: {ex.Message}");
 134
 1135            return null;
 136        }
 1137    }
 138
 139    public async Task<BonusPrediction?> PredictBonusQuestionAsync(
 140        BonusQuestion bonusQuestion,
 141        IEnumerable<DocumentContext> contextDocuments,
 142        PredictionTelemetryMetadata? telemetryMetadata = null,
 143        CancellationToken cancellationToken = default)
 144    {
 1145        _logger.LogInformation("Generating prediction for bonus question: {QuestionText}", bonusQuestion.Text);
 146
 147        try
 148        {
 149            // Build the instructions by combining template with context
 1150            var instructions = BuildBonusInstructions(contextDocuments);
 151
 152            // Create bonus question JSON
 1153            var questionJson = PredictionPromptComposer.CreateBonusQuestionJson(bonusQuestion);
 154
 1155            _logger.LogDebug("Instructions length: {InstructionsLength} characters", instructions.Length);
 1156            _logger.LogDebug("Context documents: {ContextCount}", contextDocuments.Count());
 1157            _logger.LogDebug("Question JSON: {QuestionJson}", questionJson);
 158
 159            // Create messages for the chat completion
 1160            var messages = new List<ChatMessage>
 1161            {
 1162                new SystemChatMessage(instructions),
 1163                new UserChatMessage(questionJson)
 1164            };
 165
 1166            _logger.LogDebug("Calling OpenAI API for bonus prediction");
 167
 168            // Create JSON schema based on the question
 1169            var jsonSchema = CreateSingleBonusPredictionJsonSchema(bonusQuestion);
 170
 171            // Start an OTel activity for Langfuse generation tracking
 1172            using var activity = Telemetry.Source.StartActivity("predict-bonus");
 173
 174            // Call OpenAI with structured output format
 1175            var response = await _chatClient.CompleteChatAsync(
 1176                messages,
 1177                new ChatCompletionOptions
 1178                {
 1179                    MaxOutputTokenCount = 10_000, // Standard limit for single question
 1180                    ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat(
 1181                        jsonSchemaFormatName: "bonus_prediction",
 1182                        jsonSchema: BinaryData.FromBytes(jsonSchema),
 1183                        jsonSchemaIsStrict: true)
 1184                },
 1185                cancellationToken);
 186
 187            // Parse the structured response
 1188            var predictionJson = response.Value.Content[0].Text;
 1189            _logger.LogDebug("Received bonus prediction JSON: {PredictionJson}", predictionJson);
 190
 1191            var prediction = ParseSingleBonusPrediction(predictionJson, bonusQuestion);
 192
 1193            if (prediction != null)
 194            {
 1195                _logger.LogInformation("Generated prediction for bonus question: {SelectedOptions}",
 1196                    string.Join(", ", prediction.SelectedOptionIds));
 197            }
 198
 199            // Log token usage and cost breakdown
 1200            var usage = response.Value.Usage;
 1201            _logger.LogDebug("Token usage - Input: {InputTokens}, Output: {OutputTokens}, Total: {TotalTokens}",
 1202                usage.InputTokenCount, usage.OutputTokenCount, usage.TotalTokenCount);
 203
 204            // Set Langfuse generation attributes on the activity
 1205            SetLangfuseGenerationAttributes(activity, messages, predictionJson, usage, telemetryMetadata);
 206
 207            // Add usage to tracker
 1208            _tokenUsageTracker.AddUsage(_model, usage);
 209
 210            // Calculate and log costs
 1211            _costCalculationService.LogCostBreakdown(_model, usage);
 212
 1213            return prediction;
 214        }
 1215        catch (Exception ex)
 216        {
 1217            _logger.LogError(ex, "Error generating bonus prediction for question: {QuestionText}", bonusQuestion.Text);
 1218            return null;
 219        }
 1220    }
 221
 222    private string BuildInstructions(IEnumerable<DocumentContext> contextDocuments, bool includeJustification)
 223    {
 1224        var template = includeJustification
 1225            ? _instructionsTemplateWithJustification
 1226            : _instructionsTemplate;
 227
 1228        var contextList = contextDocuments.ToList();
 1229        if (contextList.Any())
 230        {
 1231            _logger.LogDebug("Added {ContextCount} context documents to instructions", contextList.Count);
 232        }
 233        else
 234        {
 1235            _logger.LogDebug("No context documents provided");
 236        }
 237
 1238        return PredictionPromptComposer.BuildSystemPrompt(template, contextList);
 239    }
 240
 241    private static byte[] BuildPredictionJsonSchema(bool includeJustification)
 242    {
 1243        var properties = new Dictionary<string, object?>
 1244        {
 1245            ["home"] = new Dictionary<string, object?>
 1246            {
 1247                ["type"] = "integer",
 1248                ["description"] = "Predicted goals for the home team"
 1249            },
 1250            ["away"] = new Dictionary<string, object?>
 1251            {
 1252                ["type"] = "integer",
 1253                ["description"] = "Predicted goals for the away team"
 1254            }
 1255        };
 256
 1257        var required = new List<string> { "home", "away" };
 258
 1259        if (includeJustification)
 260        {
 1261            var mostValuableContextSourceItem = new Dictionary<string, object?>
 1262            {
 1263                ["type"] = "object",
 1264                ["properties"] = new Dictionary<string, object?>
 1265                {
 1266                    ["documentName"] = new Dictionary<string, object?>
 1267                    {
 1268                        ["type"] = "string",
 1269                        ["description"] = "Name of the context document referenced"
 1270                    },
 1271                    ["details"] = new Dictionary<string, object?>
 1272                    {
 1273                        ["type"] = "string",
 1274                        ["description"] = "Brief summary of why the document or parts of it were useful"
 1275                    }
 1276                },
 1277                ["required"] = new[] { "documentName", "details" },
 1278                ["additionalProperties"] = false
 1279            };
 280
 1281            var leastValuableContextSourceItem = new Dictionary<string, object?>
 1282            {
 1283                ["type"] = "object",
 1284                ["properties"] = new Dictionary<string, object?>
 1285                {
 1286                    ["documentName"] = new Dictionary<string, object?>
 1287                    {
 1288                        ["type"] = "string",
 1289                        ["description"] = "Name of the context document referenced"
 1290                    },
 1291                    ["details"] = new Dictionary<string, object?>
 1292                    {
 1293                        ["type"] = "string",
 1294                        ["description"] = "Brief summary explaining why the document or parts of it offered limited insight"
 1295                    }
 1296                },
 1297                ["required"] = new[] { "documentName", "details" },
 1298                ["additionalProperties"] = false
 1299            };
 300
 1301            var contextSources = new Dictionary<string, object?>
 1302            {
 1303                ["type"] = "object",
 1304                ["properties"] = new Dictionary<string, object?>
 1305                {
 1306                    ["mostValuable"] = new Dictionary<string, object?>
 1307                    {
 1308                        ["type"] = "array",
 1309                        ["items"] = mostValuableContextSourceItem,
 1310                        ["description"] = "Context documents that most influenced the prediction",
 1311                        ["minItems"] = 0
 1312                    },
 1313                    ["leastValuable"] = new Dictionary<string, object?>
 1314                    {
 1315                        ["type"] = "array",
 1316                        ["items"] = leastValuableContextSourceItem,
 1317                        ["description"] = "Context documents that provided limited or no valuable insight",
 1318                        ["minItems"] = 0
 1319                    }
 1320                },
 1321                ["required"] = new[] { "leastValuable", "mostValuable" },
 1322                ["additionalProperties"] = false
 1323            };
 324
 1325            properties["justification"] = new Dictionary<string, object?>
 1326            {
 1327                ["type"] = "object",
 1328                ["properties"] = new Dictionary<string, object?>
 1329                {
 1330                    ["keyReasoning"] = new Dictionary<string, object?>
 1331                    {
 1332                        ["type"] = "string",
 1333                        ["description"] = "Concise analytic summary motivating the predicted scoreline"
 1334                    },
 1335                    ["contextSources"] = contextSources,
 1336                    ["uncertainties"] = new Dictionary<string, object?>
 1337                    {
 1338                        ["type"] = "array",
 1339                        ["items"] = new Dictionary<string, object?>
 1340                        {
 1341                            ["type"] = "string",
 1342                            ["description"] = "Single uncertainty or external factor affecting confidence"
 1343                        },
 1344                        ["description"] = "Factors that could alter the predicted outcome",
 1345                        ["minItems"] = 0
 1346                    }
 1347                },
 1348                ["required"] = new[] { "contextSources", "keyReasoning", "uncertainties" },
 1349                ["additionalProperties"] = false
 1350            };
 1351            required.Add("justification");
 352        }
 353
 1354        var schema = new Dictionary<string, object?>
 1355        {
 1356            ["type"] = "object",
 1357            ["properties"] = properties,
 1358            ["required"] = required,
 1359            ["additionalProperties"] = false
 1360        };
 361
 1362        return JsonSerializer.SerializeToUtf8Bytes(schema);
 363    }
 364
 365    private Prediction ParsePrediction(string predictionJson)
 366    {
 367        try
 368        {
 1369            _logger.LogDebug("Parsing prediction JSON: {PredictionJson}", predictionJson);
 370
 1371            var predictionResponse = JsonSerializer.Deserialize<PredictionResponse>(predictionJson);
 1372            if (predictionResponse == null)
 373            {
 0374                LogRawModelResponse(predictionJson);
 0375                throw new InvalidOperationException("Failed to deserialize prediction response");
 376            }
 377
 1378            _logger.LogDebug("Parsed prediction response - Home: {Home}, Away: {Away}", predictionResponse.Home, predictionResponse.Away);
 379
 1380            PredictionJustification? justification = null;
 381
 1382            if (predictionResponse.Justification != null)
 383            {
 1384                var justificationResponse = predictionResponse.Justification;
 385
 1386                var mostValuable = justificationResponse.ContextSources?.MostValuable?
 1387                    .Where(entry => entry != null)
 1388                    .Select(entry => new PredictionJustificationContextSource(
 1389                        entry!.DocumentName?.Trim() ?? string.Empty,
 1390                        entry.Details?.Trim() ?? string.Empty))
 1391                    .ToList() ?? new List<PredictionJustificationContextSource>();
 392
 1393                var leastValuable = justificationResponse.ContextSources?.LeastValuable?
 0394                    .Where(entry => entry != null)
 0395                    .Select(entry => new PredictionJustificationContextSource(
 0396                        entry!.DocumentName?.Trim() ?? string.Empty,
 0397                        entry.Details?.Trim() ?? string.Empty))
 1398                    .ToList() ?? new List<PredictionJustificationContextSource>();
 399
 1400                var uncertainties = justificationResponse.Uncertainties?
 1401                    .Where(item => !string.IsNullOrWhiteSpace(item))
 1402                    .Select(item => item.Trim())
 1403                    .ToList() ?? new List<string>();
 404
 1405                justification = new PredictionJustification(
 1406                    justificationResponse.KeyReasoning?.Trim() ?? string.Empty,
 1407                    new PredictionJustificationContextSources(mostValuable, leastValuable),
 1408                    uncertainties);
 409
 1410                _logger.LogDebug(
 1411                    "Parsed justification with key reasoning: {KeyReasoning}; Most valuable sources: {MostValuableCount}; Least valuable sources: {LeastValuableCount}; Uncertainties: {UncertaintiesCount}",
 1412                    justification.KeyReasoning,
 1413                    justification.ContextSources.MostValuable.Count,
 1414                    justification.ContextSources.LeastValuable.Count,
 1415                    justification.Uncertainties.Count);
 416            }
 417
 1418            return new Prediction(predictionResponse.Home, predictionResponse.Away, justification);
 419        }
 1420        catch (JsonException ex)
 421        {
 1422            _logger.LogError(ex, "Failed to parse prediction JSON: {PredictionJson}", predictionJson);
 1423            LogRawModelResponse(predictionJson);
 1424            throw new InvalidOperationException($"Failed to parse prediction response: {ex.Message}", ex);
 425        }
 1426    }
 427
 428    private void LogRawModelResponse(string rawResponse)
 429    {
 1430        if (string.IsNullOrWhiteSpace(rawResponse))
 431        {
 432            const string message = "Raw model response from OpenAI was empty or whitespace.";
 0433            _logger.LogError(message);
 0434            Console.Error.WriteLine(message);
 0435            return;
 436        }
 437
 1438        _logger.LogError("Raw model response from OpenAI: {RawResponse}", rawResponse);
 1439        Console.Error.WriteLine("Raw model response from OpenAI:");
 1440        Console.Error.WriteLine(rawResponse);
 1441    }
 442
 443    private string BuildBonusInstructions(IEnumerable<DocumentContext> contextDocuments)
 444    {
 445        // Use the pre-loaded bonus instructions template
 1446        var bonusInstructionsTemplate = _bonusInstructionsTemplate;
 447
 1448        var contextList = contextDocuments.ToList();
 1449        if (contextList.Any())
 450        {
 1451            _logger.LogDebug("Added {ContextCount} context documents to bonus instructions", contextList.Count);
 452        }
 453        else
 454        {
 1455            _logger.LogDebug("No context documents provided for bonus predictions");
 456        }
 457
 1458        return PredictionPromptComposer.BuildSystemPrompt(bonusInstructionsTemplate, contextList);
 459    }
 460
 461    private static byte[] CreateSingleBonusPredictionJsonSchema(BonusQuestion question)
 462    {
 463        // For multi-selection questions, require exactly MaxSelections answers
 464        // For single-selection questions, require exactly 1 answer
 1465        var requiredSelections = question.MaxSelections;
 466
 1467        var schema = new
 1468        {
 1469            type = "object",
 1470            properties = new
 1471            {
 1472                selectedOptionIds = new
 1473                {
 1474                    type = "array",
 1475                    items = new { type = "string", @enum = question.Options.Select(o => o.Id).ToArray() },
 1476                    minItems = requiredSelections,
 1477                    maxItems = requiredSelections
 1478                }
 1479            },
 1480            required = new[] { "selectedOptionIds" },
 1481            additionalProperties = false
 1482        };
 483
 1484        return JsonSerializer.SerializeToUtf8Bytes(schema);
 485    }
 486
 487    private BonusPrediction? ParseSingleBonusPrediction(string predictionJson, BonusQuestion question)
 488    {
 489        try
 490        {
 1491            _logger.LogDebug("Parsing single bonus prediction JSON: {PredictionJson}", predictionJson);
 492
 1493            var response = JsonSerializer.Deserialize<SingleBonusPredictionResponse>(predictionJson);
 1494            if (response?.SelectedOptionIds?.Any() != true)
 495            {
 1496                throw new InvalidOperationException("Failed to deserialize bonus prediction response or no options selected");
 497            }
 498
 499            // Validate that all selected options exist for this question
 1500            var validOptionIds = question.Options.Select(o => o.Id).ToHashSet();
 1501            var invalidOptions = response.SelectedOptionIds.Where(id => !validOptionIds.Contains(id)).ToArray();
 502
 1503            if (invalidOptions.Any())
 504            {
 1505                _logger.LogWarning("Invalid option IDs for question '{QuestionText}': {InvalidOptions}",
 1506                    question.Text, string.Join(", ", invalidOptions));
 1507                return null;
 508            }
 509
 510            // Validate no duplicate selections
 1511            var duplicateOptions = response.SelectedOptionIds
 1512                .GroupBy(id => id)
 1513                .Where(g => g.Count() > 1)
 1514                .Select(g => g.Key)
 1515                .ToArray();
 516
 1517            if (duplicateOptions.Any())
 518            {
 1519                _logger.LogWarning("Duplicate option IDs for question '{QuestionText}': {DuplicateOptions}",
 1520                    question.Text, string.Join(", ", duplicateOptions));
 1521                return null;
 522            }
 523
 524            // Validate selection count - must match exactly MaxSelections for full predictions
 1525            if (response.SelectedOptionIds.Length != question.MaxSelections)
 526            {
 1527                _logger.LogWarning("Invalid selection count for question '{QuestionText}': expected exactly {MaxSelections}, got {ActualCount}",
 1528                    question.Text, question.MaxSelections, response.SelectedOptionIds.Length);
 1529                return null;
 530            }
 531
 1532            var prediction = new BonusPrediction(response.SelectedOptionIds.ToList());
 533
 1534            _logger.LogDebug("Parsed prediction: {SelectedOptions}",
 1535                string.Join(", ", response.SelectedOptionIds));
 536
 1537            return prediction;
 538        }
 1539        catch (JsonException ex)
 540        {
 1541            _logger.LogError(ex, "Failed to parse bonus prediction JSON: {PredictionJson}", predictionJson);
 1542            return null;
 543        }
 1544    }
 545
 546    /// <summary>
 547    /// Gets the file path of the match prediction prompt being used by this service
 548    /// </summary>
 549    /// <returns>The absolute file path to the match prompt file</returns>
 1550    public string GetMatchPromptPath(bool includeJustification = false) => includeJustification ? _matchPromptPathWithJustification : _matchPromptPath;
 551
 552    /// <summary>
 553    /// Gets the file path of the bonus question prediction prompt being used by this service
 554    /// </summary>
 555    /// <returns>The absolute file path to the bonus prompt file</returns>
 1556    public string GetBonusPromptPath() => _bonusPromptPath;
 557
 558    /// <summary>
 559    /// Internal class for deserializing the structured prediction response
 560    /// </summary>
 561    private class PredictionResponse
 562    {
 563        [JsonPropertyName("home")]
 1564        public int Home { get; set; }
 565
 566        [JsonPropertyName("away")]
 1567        public int Away { get; set; }
 568
 569        [JsonPropertyName("justification")]
 1570        public JustificationResponse? Justification { get; set; }
 571    }
 572
 573    private class JustificationResponse
 574    {
 575        [JsonPropertyName("keyReasoning")]
 1576        public string KeyReasoning { get; set; } = string.Empty;
 577
 578        [JsonPropertyName("contextSources")]
 1579        public JustificationContextSourcesResponse ContextSources { get; set; } = new();
 580
 581        [JsonPropertyName("uncertainties")]
 1582        public string[] Uncertainties { get; set; } = Array.Empty<string>();
 583    }
 584
 585    private class JustificationContextSourcesResponse
 586    {
 587        [JsonPropertyName("mostValuable")]
 1588        public JustificationContextSourceEntry[] MostValuable { get; set; } = Array.Empty<JustificationContextSourceEntry>();
 589
 590        [JsonPropertyName("leastValuable")]
 1591        public JustificationContextSourceEntry[] LeastValuable { get; set; } = Array.Empty<JustificationContextSourceEntry>();
 592    }
 593
 594    private class JustificationContextSourceEntry
 595    {
 596        [JsonPropertyName("documentName")]
 1597        public string DocumentName { get; set; } = string.Empty;
 598
 599        [JsonPropertyName("details")]
 1600        public string Details { get; set; } = string.Empty;
 601    }
 602
 603    /// <summary>
 604    /// Internal class for deserializing the bonus predictions response
 605    /// </summary>
 606    private class BonusPredictionsResponse
 607    {
 608        [JsonPropertyName("predictions")]
 0609        public BonusPredictionEntry[]? Predictions { get; set; }
 610    }
 611
 612    /// <summary>
 613    /// Internal class for deserializing individual bonus prediction entries
 614    /// </summary>
 615    private class BonusPredictionEntry
 616    {
 617        [JsonPropertyName("questionId")]
 0618        public string QuestionId { get; set; } = string.Empty;
 619
 620        [JsonPropertyName("selectedOptionIds")]
 0621        public string[] SelectedOptionIds { get; set; } = Array.Empty<string>();
 622    }
 623
 624    /// <summary>
 625    /// Internal class for deserializing single bonus prediction response
 626    /// </summary>
 627    private class SingleBonusPredictionResponse
 628    {
 629        [JsonPropertyName("selectedOptionIds")]
 1630        public string[] SelectedOptionIds { get; set; } = Array.Empty<string>();
 631    }
 632
 633    /// <summary>
 634    /// Sets Langfuse-mapped OpenTelemetry attributes on the given activity.
 635    /// If <paramref name="activity"/> is <c>null</c> (no OTel listener registered), this is a no-op.
 636    /// </summary>
 637    private void SetLangfuseGenerationAttributes(
 638        Activity? activity,
 639        List<ChatMessage> messages,
 640        string responseJson,
 641        ChatTokenUsage usage,
 642        PredictionTelemetryMetadata? telemetryMetadata)
 643    {
 1644        if (activity is null)
 1645            return;
 646
 0647        activity.SetTag("langfuse.observation.type", "generation");
 0648        activity.SetTag("gen_ai.request.model", _model);
 649
 650        // Serialize messages as input (system prompt + user message)
 0651        var inputMessages = messages.Select(m => new
 0652        {
 0653            role = m switch
 0654            {
 0655                SystemChatMessage => "system",
 0656                UserChatMessage => "user",
 0657                _ => "unknown"
 0658            },
 0659            content = m switch
 0660            {
 0661                SystemChatMessage s => s.Content[0].Text,
 0662                UserChatMessage u => u.Content[0].Text,
 0663                _ => string.Empty
 0664            }
 0665        });
 0666        activity.SetTag("langfuse.observation.input", JsonSerializer.Serialize(inputMessages));
 0667        activity.SetTag("langfuse.observation.output", responseJson);
 0668        telemetryMetadata?.ApplyToObservation(activity);
 669
 670        // Token usage details
 0671        var usageDetails = new
 0672        {
 0673            input = usage.InputTokenCount,
 0674            output = usage.OutputTokenCount,
 0675            cache_read_input_tokens = usage.InputTokenDetails?.CachedTokenCount ?? 0,
 0676            reasoning_tokens = usage.OutputTokenDetails?.ReasoningTokenCount ?? 0
 0677        };
 0678        activity.SetTag("langfuse.observation.usage_details", JsonSerializer.Serialize(usageDetails));
 679
 680        // Cost details are intentionally omitted here: Langfuse automatically infers costs from the
 681        // model name and usage_details using its maintained pricing tables, which are more up-to-date
 682        // than the prices kept in this repository. Explicitly ingesting cost_details would override
 683        // that inference (ingested values take priority over inferred ones).
 0684    }
 685}

Methods/Properties

.ctor(OpenAI.Chat.ChatClient, Microsoft.Extensions.Logging.ILogger<OpenAiIntegration.PredictionService>, OpenAiIntegration.ICostCalculationService, OpenAiIntegration.ITokenUsageTracker, OpenAiIntegration.IInstructionsTemplateProvider, string)
PredictMatchAsync()
PredictBonusQuestionAsync()
BuildInstructions(System.Collections.Generic.IEnumerable<EHonda.KicktippAi.Core.DocumentContext>, bool)
BuildPredictionJsonSchema(bool)
ParsePrediction(string)
LogRawModelResponse(string)
BuildBonusInstructions(System.Collections.Generic.IEnumerable<EHonda.KicktippAi.Core.DocumentContext>)
CreateSingleBonusPredictionJsonSchema(EHonda.KicktippAi.Core.BonusQuestion)
ParseSingleBonusPrediction(string, EHonda.KicktippAi.Core.BonusQuestion)
GetMatchPromptPath(bool)
GetBonusPromptPath()
get_Home()
set_Home(int)
get_Away()
set_Away(int)
get_Justification()
set_Justification(OpenAiIntegration.PredictionService.JustificationResponse)
get_KeyReasoning()
set_KeyReasoning(string)
.ctor()
get_ContextSources()
set_ContextSources(OpenAiIntegration.PredictionService.JustificationContextSourcesResponse)
get_Uncertainties()
set_Uncertainties(string[])
get_MostValuable()
set_MostValuable(OpenAiIntegration.PredictionService.JustificationContextSourceEntry[])
.ctor()
get_LeastValuable()
set_LeastValuable(OpenAiIntegration.PredictionService.JustificationContextSourceEntry[])
get_DocumentName()
set_DocumentName(string)
.ctor()
get_Details()
set_Details(string)
get_Predictions()
set_Predictions(OpenAiIntegration.PredictionService.BonusPredictionEntry[])
get_QuestionId()
set_QuestionId(string)
.ctor()
get_SelectedOptionIds()
set_SelectedOptionIds(string[])
get_SelectedOptionIds()
set_SelectedOptionIds(string[])
.ctor()
SetLangfuseGenerationAttributes(System.Diagnostics.Activity, System.Collections.Generic.List<OpenAI.Chat.ChatMessage>, string, OpenAI.Chat.ChatTokenUsage, OpenAiIntegration.PredictionTelemetryMetadata)