diff --git a/pkgs/sdk/server-ai/README.md b/pkgs/sdk/server-ai/README.md
index 685b3116..85997478 100644
--- a/pkgs/sdk/server-ai/README.md
+++ b/pkgs/sdk/server-ai/README.md
@@ -22,7 +22,7 @@ The .NET build tools should automatically load the most appropriate build of the
## Getting started
-Refer to the [SDK documentation](https://docs.launchdarkly.com/sdk/server-side/dotnet#getting-started) for instructions on getting started with using the SDK.
+Refer to the [SDK documentation](https://docs.launchdarkly.com/sdk/ai/dotnet) for instructions on getting started with using the SDK.
## Signing
diff --git a/pkgs/sdk/server-ai/src/Adapters/LdClientAdapter.cs b/pkgs/sdk/server-ai/src/Adapters/LdClientAdapter.cs
new file mode 100644
index 00000000..f7f53210
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/Adapters/LdClientAdapter.cs
@@ -0,0 +1,32 @@
+using LaunchDarkly.Logging;
+using LaunchDarkly.Sdk.Server.Ai.Interfaces;
+
+namespace LaunchDarkly.Sdk.Server.Ai.Adapters;
+
+///
+/// Adapts an to the requirements of .
+///
+public class LdClientAdapter : ILaunchDarklyClient
+{
+ private readonly LdClient _client;
+
+ ///
+ /// Constructs the adapter from an existing client.
+ ///
+ /// the client
+ public LdClientAdapter(LdClient client)
+ {
+ _client = client;
+ }
+
+ ///
+ public LdValue JsonVariation(string key, Context context, LdValue defaultValue)
+ => _client.JsonVariation(key, context, defaultValue);
+
+ ///
+ public void Track(string name, Context context, LdValue data, double metricValue)
+ => _client.Track(name, context, data, metricValue);
+
+ ///
+ public ILogger GetLogger() => new LoggerAdapter(_client.GetLogger());
+}
diff --git a/pkgs/sdk/server-ai/src/Adapters/LoggerAdapter.cs b/pkgs/sdk/server-ai/src/Adapters/LoggerAdapter.cs
new file mode 100644
index 00000000..e50eeed3
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/Adapters/LoggerAdapter.cs
@@ -0,0 +1,28 @@
+using LaunchDarkly.Logging;
+using LaunchDarkly.Sdk.Server.Ai.Interfaces;
+
+namespace LaunchDarkly.Sdk.Server.Ai.Adapters;
+
+///
+/// Adapts a to the requirements of the 's
+/// logger.
+///
+internal class LoggerAdapter : ILogger
+{
+ private readonly Logger _logger;
+
+ ///
+ /// Creates a new adapter.
+ ///
+ /// the existing logger
+ public LoggerAdapter(Logger logger)
+ {
+ _logger = logger;
+ }
+
+ ///
+ public void Error(string format, params object[] allParams) => _logger.Error(format, allParams);
+
+ ///
+ public void Warn(string format, params object[] allParams) => _logger.Warn(format, allParams);
+}
diff --git a/pkgs/sdk/server-ai/src/Config/LdAiConfig.cs b/pkgs/sdk/server-ai/src/Config/LdAiConfig.cs
new file mode 100644
index 00000000..321dae7b
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/Config/LdAiConfig.cs
@@ -0,0 +1,191 @@
+using System.Collections.Generic;
+using System.Linq;
+using LaunchDarkly.Sdk.Server.Ai.DataModel;
+
+namespace LaunchDarkly.Sdk.Server.Ai.Config;
+
+///
+/// Represents an AI configuration, which contains model parameters and prompt messages.
+///
+public record LdAiConfig
+{
+
+ ///
+ /// Represents a single message, which is part of a prompt.
+ ///
+ public record Message
+ {
+ ///
+ /// The content of the message, which may contain Mustache templates.
+ ///
+ public readonly string Content;
+
+ ///
+ /// The role of the message.
+ ///
+ public readonly Role Role;
+
+ internal Message(string content, Role role)
+ {
+ Content = content;
+ Role = role;
+ }
+ }
+
+ ///
+ /// Builder for constructing an LdAiConfig instance, which can be passed as the default
+ /// value to the AI Client's method.
+ ///
+ public class Builder
+ {
+ private bool _enabled;
+ private readonly List _prompt;
+ private readonly Dictionary _modelParams;
+
+ internal Builder()
+ {
+ _enabled = false;
+ _prompt = new List();
+ _modelParams = new Dictionary();
+ }
+
+ ///
+ /// Adds a prompt message with the given content and role. The default role is .
+ ///
+ /// the content, which may contain Mustache templates
+ /// the role
+ /// a new builder
+ public Builder AddPromptMessage(string content, Role role = Role.User)
+ {
+ _prompt.Add(new Message(content, role));
+ return this;
+ }
+
+ ///
+ /// Disables the config.
+ ///
+ /// the builder
+ public Builder Disable() => SetEnabled(false);
+
+ ///
+ /// Enables the config.
+ ///
+ /// the builder
+ public Builder Enable() => SetEnabled(true);
+
+ ///
+ /// Sets the enabled state of the config based on a boolean.
+ ///
+ /// whether the config is enabled
+ /// the builder
+ public Builder SetEnabled(bool enabled)
+ {
+ _enabled = enabled;
+ return this;
+ }
+
+ ///
+ /// Sets a parameter for the model. The value may be any object.
+ ///
+ /// the parameter name
+ /// the parameter value
+ /// the builder
+ public Builder SetModelParam(string name, object value)
+ {
+ _modelParams[name] = value;
+ return this;
+ }
+
+ ///
+ /// Builds the LdAiConfig instance.
+ ///
+ /// a new LdAiConfig
+ public LdAiConfig Build()
+ {
+ return new LdAiConfig(_enabled, _prompt, new Meta(), _modelParams);
+ }
+ }
+
+ ///
+ /// The prompts associated with the config.
+ ///
+ public readonly IReadOnlyList Prompt;
+
+ ///
+ /// The model parameters associated with the config.
+ ///
+ public readonly IReadOnlyDictionary Model;
+
+
+
+ internal LdAiConfig(bool enabled, IEnumerable prompt, Meta meta, IReadOnlyDictionary model)
+ {
+ Model = model ?? new Dictionary();
+ Prompt = prompt?.ToList() ?? new List();
+ VersionKey = meta?.VersionKey ?? "";
+ Enabled = enabled;
+ }
+
+ private static LdValue ObjectToValue(object obj)
+ {
+ if (obj == null)
+ {
+ return LdValue.Null;
+ }
+
+ return obj switch
+ {
+ bool b => LdValue.Of(b),
+ double d => LdValue.Of(d),
+ string s => LdValue.Of(s),
+ IEnumerable list => LdValue.ArrayFrom(list.Select(ObjectToValue)),
+ IDictionary dict => LdValue.ObjectFrom(dict.ToDictionary(kv => kv.Key,
+ kv => ObjectToValue(kv.Value))),
+ _ => LdValue.Null
+ };
+ }
+
+ internal LdValue ToLdValue()
+ {
+ return LdValue.ObjectFrom(new Dictionary
+ {
+ { "_ldMeta", LdValue.ObjectFrom(
+ new Dictionary
+ {
+ { "versionKey", LdValue.Of(VersionKey) },
+ { "enabled", LdValue.Of(Enabled) }
+ }) },
+ { "prompt", LdValue.ArrayFrom(Prompt.Select(m => LdValue.ObjectFrom(new Dictionary
+ {
+ { "content", LdValue.Of(m.Content) },
+ { "role", LdValue.Of(m.Role.ToString()) }
+ }))) },
+ { "model", ObjectToValue(Model) }
+ });
+ }
+
+ ///
+ /// Creates a new LdAiConfig builder.
+ ///
+ /// a new builder
+ public static Builder New() => new();
+
+ ///
+ /// Returns true if the config is enabled.
+ ///
+ /// true if enabled
+ public bool Enabled { get; }
+
+
+ ///
+ /// This field is meant for internal LaunchDarkly usage.
+ ///
+ public string VersionKey { get; }
+
+ ///
+ /// Convenient helper that returns a disabled LdAiConfig.
+ ///
+ public static LdAiConfig Disabled = New().Disable().Build();
+
+
+}
diff --git a/pkgs/sdk/server-ai/src/DataModel/DataModel.cs b/pkgs/sdk/server-ai/src/DataModel/DataModel.cs
new file mode 100644
index 00000000..a5f4a9e6
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/DataModel/DataModel.cs
@@ -0,0 +1,86 @@
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+
+namespace LaunchDarkly.Sdk.Server.Ai.DataModel;
+
+///
+/// Represents the role of the prompt message.
+///
+public enum Role
+{
+ ///
+ /// User role.
+ ///
+ User,
+ ///
+ /// System role.
+ ///
+ System,
+ ///
+ /// Assistant role.
+ ///
+ Assistant
+}
+
+///
+/// Represents the JSON serialization of the Meta field.
+///
+public class Meta
+{
+ ///
+ /// The version key.
+ ///
+ [JsonPropertyName("versionKey")]
+ public string VersionKey { get; set; }
+
+ ///
+ /// If the config is enabled.
+ ///
+ [JsonPropertyName("enabled")]
+ public bool Enabled { get; set; }
+}
+
+///
+/// Represents the JSON serialization of a Message.
+///
+public class Message
+{
+ ///
+ /// The content.
+ ///
+ [JsonPropertyName("content")]
+ public string Content { get; set; }
+
+ ///
+ /// The role.
+ ///
+ [JsonPropertyName("role")]
+ [JsonConverter(typeof(JsonStringEnumConverter))]
+ public Role Role { get; set; }
+}
+
+
+///
+/// Represents the JSON serialization of an AiConfig.
+///
+
+public class AiConfig
+{
+ ///
+ /// The prompt.
+ ///
+ [JsonPropertyName("prompt")]
+ public List Prompt { get; set; }
+
+ ///
+ /// LaunchDarkly metadata.
+ ///
+ [JsonPropertyName("_ldMeta")]
+ public Meta Meta { get; set; }
+
+ ///
+ /// The model params.
+ ///
+ [JsonPropertyName("model")]
+ public Dictionary Model { get; set; }
+}
diff --git a/pkgs/sdk/server-ai/src/Interfaces/ILaunchDarklyClient.cs b/pkgs/sdk/server-ai/src/Interfaces/ILaunchDarklyClient.cs
new file mode 100644
index 00000000..0ef808e7
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/Interfaces/ILaunchDarklyClient.cs
@@ -0,0 +1,32 @@
+namespace LaunchDarkly.Sdk.Server.Ai.Interfaces;
+
+///
+/// Interface representing capabilities needed by the AI Client. These are usually provided
+/// by the LaunchDarkly Server SDK.
+///
+public interface ILaunchDarklyClient
+{
+ ///
+ /// Returns a JSON variation.
+ ///
+ /// the flag key
+ /// the context
+ /// the default value
+ /// the evaluation result
+ LdValue JsonVariation(string key, Context context, LdValue defaultValue);
+
+ ///
+ /// Tracks a metric.
+ ///
+ /// metric name
+ /// context
+ /// associated data
+ /// metric value
+ void Track(string name, Context context, LdValue data, double metricValue);
+
+ ///
+ /// Returns a logger.
+ ///
+ /// a logger
+ ILogger GetLogger();
+}
diff --git a/pkgs/sdk/server-ai/src/Interfaces/ILdAiClient.cs b/pkgs/sdk/server-ai/src/Interfaces/ILdAiClient.cs
new file mode 100644
index 00000000..715d9fe3
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/Interfaces/ILdAiClient.cs
@@ -0,0 +1,29 @@
+using System.Collections.Generic;
+using LaunchDarkly.Sdk.Server.Ai.Config;
+
+namespace LaunchDarkly.Sdk.Server.Ai.Interfaces;
+
+///
+/// Represents the interface of the AI client, useful for mocking.
+///
+public interface ILdAiClient
+{
+
+ ///
+ /// Retrieves a LaunchDarkly AI config identified by the given key. The return value
+ /// is an , which makes the configuration available and
+ /// provides convenience methods for generating events related to model usage.
+ ///
+ /// Any variables provided will be interpolated into the prompt's messages.
+ /// Additionally, the current LaunchDarkly context will be available as 'ldctx' within
+ /// a prompt message.
+ ///
+ ///
+ /// the flag key
+ /// the context
+ /// the default config, if unable to retrieve from LaunchDarkly
+ /// the list of variables used when interpolating the prompt
+ /// an AI config tracker
+ public ILdAiConfigTracker ModelConfig(string key, Context context, LdAiConfig defaultValue,
+ IReadOnlyDictionary variables = null);
+}
diff --git a/pkgs/sdk/server-ai/src/Interfaces/ILdAiConfigTracker.cs b/pkgs/sdk/server-ai/src/Interfaces/ILdAiConfigTracker.cs
new file mode 100644
index 00000000..335354bf
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/Interfaces/ILdAiConfigTracker.cs
@@ -0,0 +1,57 @@
+using System;
+using System.Threading.Tasks;
+using LaunchDarkly.Sdk.Server.Ai.Config;
+using LaunchDarkly.Sdk.Server.Ai.Provider;
+
+namespace LaunchDarkly.Sdk.Server.Ai.Interfaces;
+
+///
+/// Represents the interface of the AI Config Tracker, useful for mocking.
+///
+public interface ILdAiConfigTracker
+{
+ ///
+ /// The retrieved AI model configuration.
+ ///
+ public LdAiConfig Config { get; }
+
+ ///
+ /// Tracks a duration metric related to this config.
+ ///
+ /// the duration in milliseconds
+ public void TrackDuration(float durationMs);
+
+ ///
+ /// Tracks the duration of a task, and returns the result of the task.
+ ///
+ /// the task
+ /// type of the task's result
+ /// the task
+ public Task TrackDurationOfTask(Task task);
+
+ ///
+ /// Tracks feedback (positive or negative) related to the output of the model.
+ ///
+ /// the feedback
+ /// thrown if the feedback value is not or
+ public void TrackFeedback(Feedback feedback);
+
+ ///
+ /// Tracks a generation event related to this config.
+ ///
+ public void TrackSuccess();
+
+ ///
+ /// Tracks a request to a provider. The request is a task that returns a , which
+ /// contains information about the request such as token usage and metrics.
+ ///
+ /// a task representing the request
+ /// the task
+ public Task TrackRequest(Task request);
+
+ ///
+ /// Tracks token usage related to this config.
+ ///
+ /// the usage
+ public void TrackTokens(Usage usage);
+}
diff --git a/pkgs/sdk/server-ai/src/Interfaces/ILogger.cs b/pkgs/sdk/server-ai/src/Interfaces/ILogger.cs
new file mode 100644
index 00000000..6c0d0c18
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/Interfaces/ILogger.cs
@@ -0,0 +1,21 @@
+namespace LaunchDarkly.Sdk.Server.Ai.Interfaces;
+
+///
+/// Log interface required by the AI Client.
+///
+public interface ILogger
+{
+ ///
+ /// Log an error.
+ ///
+ /// format string
+ /// parameters
+ void Error(string format, params object[] allParams);
+
+ ///
+ /// Log a warning.
+ ///
+ /// format string
+ /// parameters
+ void Warn(string format, params object[] allParams);
+}
diff --git a/pkgs/sdk/server-ai/src/LaunchDarkly.ServerSdk.Ai.csproj b/pkgs/sdk/server-ai/src/LaunchDarkly.ServerSdk.Ai.csproj
index 4c784f1e..3e624aeb 100644
--- a/pkgs/sdk/server-ai/src/LaunchDarkly.ServerSdk.Ai.csproj
+++ b/pkgs/sdk/server-ai/src/LaunchDarkly.ServerSdk.Ai.csproj
@@ -15,7 +15,7 @@
Library
LaunchDarkly.ServerSdk.Ai
LaunchDarkly.Sdk.Server.Ai
- 7.3
+ 11
LaunchDarkly Server-Side .NET AI SDK
LaunchDarkly
LaunchDarkly
@@ -39,6 +39,7 @@
+
diff --git a/pkgs/sdk/server-ai/src/LdAiClient.cs b/pkgs/sdk/server-ai/src/LdAiClient.cs
new file mode 100644
index 00000000..e1c5846f
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/LdAiClient.cs
@@ -0,0 +1,143 @@
+using System;
+using System.Collections.Generic;
+using System.Collections.Immutable;
+using System.Linq;
+using System.Text.Json;
+using LaunchDarkly.Sdk.Server.Ai.Config;
+using LaunchDarkly.Sdk.Server.Ai.DataModel;
+using LaunchDarkly.Sdk.Server.Ai.Interfaces;
+using Mustache;
+
+namespace LaunchDarkly.Sdk.Server.Ai;
+
+///
+/// The LaunchDarkly AI client. The client is capable of retrieving AI configurations from LaunchDarkly,
+/// and generating events specific to usage of the AI configuration when interacting with model providers.
+///
+public sealed class LdAiClient : ILdAiClient
+{
+ private readonly ILaunchDarklyClient _client;
+ private readonly ILogger _logger;
+
+ ///
+ /// Constructs a new LaunchDarkly AI client.
+ ///
+ /// Example:
+ ///
+ /// var config = Configuration.Builder("my-sdk-key").Build();
+ /// var client = new LdClient(config);
+ /// var aiClient = new LdAiClient(client);
+ ///
+ ///
+ ///
+ /// a LaunchDarkly Server-side SDK client instance
+ public LdAiClient(ILaunchDarklyClient client)
+ {
+ _client = client ?? throw new ArgumentNullException(nameof(client));
+ _logger = _client.GetLogger();
+ }
+
+
+ // This is the special Mustache variable that can be used in prompts to access the current
+ // LaunchDarkly context. For example, {{ ldctx.key }} will return the context key.
+ private const string LdContextVariable = "ldctx";
+
+ ///
+ public ILdAiConfigTracker ModelConfig(string key, Context context, LdAiConfig defaultValue,
+ IReadOnlyDictionary variables = null)
+ {
+
+ var result = _client.JsonVariation(key, context, defaultValue.ToLdValue());
+
+ var parsed = ParseConfig(result, key);
+ if (parsed == null)
+ {
+ // ParseConfig already does logging.
+ return new LdAiConfigTracker(_client, key, defaultValue, context);
+ }
+
+
+ var mergedVariables = new Dictionary { { LdContextVariable, GetAllAttributes(context) } };
+ if (variables != null)
+ {
+ foreach (var kvp in variables)
+ {
+ if (kvp.Key == LdContextVariable)
+ {
+ _logger.Warn("AI model config variables contains 'ldctx' key, which is reserved; this key will be the value of the LaunchDarkly context");
+ continue;
+ }
+ mergedVariables[kvp.Key] = kvp.Value;
+ }
+ }
+
+
+ var prompt =
+ parsed.Prompt?.Select(m => new LdAiConfig.Message(InterpolateTemplate(m.Content, mergedVariables), m.Role));
+
+ return new LdAiConfigTracker(_client, key, new LdAiConfig(parsed.Meta?.Enabled ?? false, prompt, parsed.Meta, parsed.Model), context);
+ }
+
+ ///
+ /// Retrieves all attributes from the given context, including private attributes. The attributes
+ /// are converted into C# primitives recursively.
+ ///
+ /// the context
+ /// the attributes
+ private static IDictionary GetAllAttributes(Context context)
+ {
+ var attributes = new Dictionary();
+ foreach (var key in context.OptionalAttributeNames)
+ {
+ attributes[key] = ValueToObject(context.GetValue(AttributeRef.FromLiteral(key)));
+ }
+
+ attributes["kind"] = context.Kind.ToString();
+ attributes["key"] = context.Key;
+ attributes["anonymous"] = context.Anonymous;
+
+ return attributes;
+ }
+
+ ///
+ /// Recursively converts an LdValue into a C# object.
+ ///
+ /// the LdValue
+ /// the object
+ private static object ValueToObject(LdValue value)
+ {
+ return value.Type switch
+ {
+ LdValueType.Null => null,
+ LdValueType.Bool => value.AsBool,
+ LdValueType.Number => value.AsDouble,
+ LdValueType.String => value.AsString,
+ LdValueType.Array => value.List.Select(ValueToObject).ToList(),
+ LdValueType.Object => value.Dictionary
+ .Select(kv => new KeyValuePair(kv.Key, ValueToObject(kv.Value)))
+ .ToImmutableDictionary(),
+ _ => null
+ };
+ }
+
+ private static string InterpolateTemplate(string template, IReadOnlyDictionary variables)
+ {
+ return Template.Compile(template).Render(variables);
+ }
+
+
+ private AiConfig ParseConfig(LdValue value, string key)
+ {
+
+ var serialized = value.ToJsonString();
+ try
+ {
+ return JsonSerializer.Deserialize(serialized);
+ }
+ catch (JsonException e)
+ {
+ _logger.Error("Unable to parse AI model config for key {0}: {1}", key, e.Message);
+ return null;
+ }
+ }
+}
diff --git a/pkgs/sdk/server-ai/src/LdAiConfigTracker.cs b/pkgs/sdk/server-ai/src/LdAiConfigTracker.cs
index d9d506e4..2b548df1 100644
--- a/pkgs/sdk/server-ai/src/LdAiConfigTracker.cs
+++ b/pkgs/sdk/server-ai/src/LdAiConfigTracker.cs
@@ -1,29 +1,128 @@
using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Threading.Tasks;
+using LaunchDarkly.Sdk.Server.Ai.Config;
+using LaunchDarkly.Sdk.Server.Ai.Interfaces;
+using LaunchDarkly.Sdk.Server.Ai.Provider;
-namespace LaunchDarkly.Sdk.Server.Ai
+namespace LaunchDarkly.Sdk.Server.Ai;
+
+///
+/// A tracker capable of reporting events related to a particular AI configuration.
+///
+public class LdAiConfigTracker : ILdAiConfigTracker
{
+ private readonly ILaunchDarklyClient _client;
+ private readonly Context _context;
+ private readonly LdValue _trackData;
+
+ private const string Duration = "$ld:ai:duration:total";
+ private const string FeedbackPositive = "$ld:ai:feedback:user:positive";
+ private const string FeedbackNegative = "$ld:ai:feedback:user:negative";
+ private const string Generation = "$ld:ai:generation";
+ private const string TokenTotal = "$ld:ai:tokens:total";
+ private const string TokenInput = "$ld:ai:tokens:input";
+ private const string TokenOutput = "$ld:ai:tokens:output";
+
///
- /// TBD
+ /// Constructs a new AI configuration tracker. The tracker is associated with a configuration,
+ /// a context, and a key which identifies the configuration.
///
- public sealed class LdAiConfigTracker : IDisposable
+ /// the LaunchDarkly client
+ /// key of the AI config
+ /// the AI config
+ /// the context
+ ///
+ public LdAiConfigTracker(ILaunchDarklyClient client, string configKey, LdAiConfig config, Context context)
{
- private readonly LdClient _client;
+ Config = config ?? throw new ArgumentNullException(nameof(config));
+ _client = client ?? throw new ArgumentNullException(nameof(client));
+ _context = context;
+ _trackData = LdValue.ObjectFrom(new Dictionary
+ {
+ { "versionKey", LdValue.Of(Config.VersionKey)},
+ { "configKey" , LdValue.Of(configKey ?? throw new ArgumentNullException(nameof(configKey))) }
+ });
+ }
+
+
+ ///
+ public LdAiConfig Config { get; }
+
+ ///
+ public void TrackDuration(float durationMs) =>
+ _client.Track(Duration, _context, _trackData, durationMs);
+
- ///
- /// TBD
- ///
- /// an LdClient instance
- public LdAiConfigTracker(LdClient client)
+ ///
+ public async Task TrackDurationOfTask(Task task)
+ {
+ var result = await MeasureDurationOfTaskMs(task);
+ TrackDuration(result.Item2);
+ return result.Item1;
+ }
+
+ private static async Task> MeasureDurationOfTaskMs(Task task)
+ {
+ var sw = Stopwatch.StartNew();
+ var result = await task;
+ sw.Stop();
+ return Tuple.Create(result, sw.ElapsedMilliseconds);
+ }
+
+ ///
+ public void TrackFeedback(Feedback feedback)
+ {
+ switch (feedback)
{
- _client = client;
+ case Feedback.Positive:
+ _client.Track(FeedbackPositive, _context, _trackData, 1);
+ break;
+ case Feedback.Negative:
+ _client.Track(FeedbackNegative, _context, _trackData, 1);
+ break;
+ default:
+ throw new ArgumentOutOfRangeException(nameof(feedback), feedback, null);
}
+ }
- ///
- /// THD
- ///
- public void Dispose()
+ ///
+ public void TrackSuccess()
+ {
+ _client.Track(Generation, _context, _trackData, 1);
+ }
+
+ ///
+ public async Task TrackRequest(Task request)
+ {
+ var (result, durationMs) = await MeasureDurationOfTaskMs(request);
+ TrackSuccess();
+
+ TrackDuration(result.Metrics?.LatencyMs ?? durationMs);
+
+ if (result.Usage != null)
+ {
+ TrackTokens(result.Usage.Value);
+ }
+
+ return result;
+ }
+
+ ///
+ public void TrackTokens(Usage usage)
+ {
+ if (usage.Total is > 0)
+ {
+ _client.Track(TokenTotal, _context, _trackData, usage.Total.Value);
+ }
+ if (usage.Input is > 0)
+ {
+ _client.Track(TokenInput, _context, _trackData, usage.Input.Value);
+ }
+ if (usage.Output is > 0)
{
- _client?.Dispose();
+ _client.Track(TokenOutput, _context, _trackData, usage.Output.Value);
}
}
}
diff --git a/pkgs/sdk/server-ai/src/Provider/Feedback.cs b/pkgs/sdk/server-ai/src/Provider/Feedback.cs
new file mode 100644
index 00000000..8383bb6c
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/Provider/Feedback.cs
@@ -0,0 +1,17 @@
+namespace LaunchDarkly.Sdk.Server.Ai.Provider;
+
+///
+/// Feedback about the generated content.
+///
+public enum Feedback
+{
+ ///
+ /// The sentiment was positive.
+ ///
+ Positive,
+
+ ///
+ /// The sentiment was negative.
+ ///
+ Negative,
+}
diff --git a/pkgs/sdk/server-ai/src/Provider/Usage.cs b/pkgs/sdk/server-ai/src/Provider/Usage.cs
new file mode 100644
index 00000000..633632d5
--- /dev/null
+++ b/pkgs/sdk/server-ai/src/Provider/Usage.cs
@@ -0,0 +1,24 @@
+namespace LaunchDarkly.Sdk.Server.Ai.Provider;
+
+///
+/// Represents metrics returned by a model provider.
+///
+/// the duration of the request in milliseconds
+public record struct Metrics(long? LatencyMs);
+
+
+///
+/// Represents token usage.
+///
+/// the total tokens used
+/// the tokens sent as input
+/// the tokens received as output
+public record struct Usage(int? Total, int? Input, int? Output);
+
+
+///
+/// Represents information returned by a model provider.
+///
+/// the token usage
+/// the metrics relevant to the request
+public record struct Response(Usage? Usage, Metrics? Metrics);
diff --git a/pkgs/sdk/server-ai/test/InterpolationTests.cs b/pkgs/sdk/server-ai/test/InterpolationTests.cs
new file mode 100644
index 00000000..d6256e9d
--- /dev/null
+++ b/pkgs/sdk/server-ai/test/InterpolationTests.cs
@@ -0,0 +1,141 @@
+using System.Collections.Generic;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+using LaunchDarkly.Sdk.Server.Ai.Config;
+using LaunchDarkly.Sdk.Server.Ai.DataModel;
+using LaunchDarkly.Sdk.Server.Ai.Interfaces;
+using Moq;
+using Xunit;
+
+namespace LaunchDarkly.Sdk.Server.Ai;
+
+public class InterpolationTests
+{
+ private string Eval(string prompt, Context context, IReadOnlyDictionary variables)
+ {
+ var mockClient = new Mock();
+ var mockLogger = new Mock();
+
+
+ // The replacement is done this way because to use string.Format, we'd need to escape the curly braces.
+ var configJson = """
+ {
+ "_ldMeta": {"versionKey": "1", "enabled": true},
+ "model": {},
+ "prompt": [
+ {
+ "content": "",
+ "role": "System"
+ }
+ ]
+ }
+ """.Replace("", prompt);
+
+
+ mockClient.Setup(x =>
+ x.JsonVariation("foo", It.IsAny(), It.IsAny())).Returns(LdValue.Parse(configJson));
+
+ mockClient.Setup(x => x.GetLogger()).Returns(mockLogger.Object);
+
+ var client = new LdAiClient(mockClient.Object);
+ var tracker = client.ModelConfig("foo", context, LdAiConfig.Disabled, variables);
+
+ return tracker.Config.Prompt[0].Content;
+ }
+
+ [Theory]
+ [InlineData("{{ adjective}}")]
+ [InlineData("{{ adjective.nested.deep }}")]
+ [InlineData("{{ ldctx.this_is_not_a_variable }}")]
+ public void TestInterpolationMissingVariables(string variable)
+ {
+
+ var context = Context.New("user-key");
+ var result = Eval($"I am an ({variable}) LLM", context, null);
+ Assert.Equal("I am an () LLM", result);
+ }
+
+ [Theory]
+ [InlineData("awesome")]
+ [InlineData("slow")]
+ [InlineData("all powerful")]
+ public void TestInterpolationWithVariables(string description)
+ {
+ var context = Context.New("user-key");
+ var variables = new Dictionary
+ {
+ { "adjective", description }
+ };
+ var result = Eval("I am an {{ adjective }} LLM", context, variables);
+ Assert.Equal($"I am an {description} LLM", result);
+ }
+
+ [Fact]
+ public void TestInterpolationWithMultipleVariables()
+ {
+ var context = Context.New("user-key");
+ var variables = new Dictionary
+ {
+ { "adjective", "awesome" },
+ { "noun", "robot" },
+ { "stats", new Dictionary
+ {
+ { "power", 9000 }
+ }
+ }
+ };
+
+ var result = Eval("I am an {{ adjective }} {{ noun }} with power over {{ stats.power }}", context, variables);
+ Assert.Equal("I am an awesome robot with power over 9000", result);
+ }
+
+ [Theory]
+ [InlineData("{{ adjectives.0 }}")]
+ [InlineData("{{ adjectives[0] }}")]
+ public void TestInterpolationWithArrayAccessDoesNotWork(string accessor)
+ {
+ var context = Context.New("user-key");
+ var variables = new Dictionary
+ {
+ { "adjectives", new List { "awesome", "slow", "all powerful" } }
+ };
+
+ var result = Eval($"I am an ({accessor}) LLM", context, variables);
+ Assert.Equal("I am an () LLM", result);
+ }
+
+ [Fact]
+ public void TestInterpolationWithArraySectionWorks()
+ {
+ var context = Context.New("user-key");
+ var variables = new Dictionary
+ {
+ { "adjectives", new List { "hello", "world", "!" } }
+ };
+
+ var result = Eval("{{#adjectives}}{{.}} {{/adjectives}}", context, variables);
+ Assert.Equal("hello world ! ", result);
+ }
+
+
+ [Fact]
+ public void TestInterpolationWithBasicContext()
+ {
+ var context = Context.Builder(ContextKind.Default, "123")
+ .Set("name", "Sandy").Build();
+ var result1 = Eval("I'm a {{ ldctx.kind}} with key {{ ldctx.key }}, named {{ ldctx.name }}", context, null);
+ Assert.Equal("I'm a user with key 123, named Sandy", result1);
+ }
+
+ [Fact]
+ public void TestInterpolationWithNestedContextAttributes()
+ {
+ var context = Context.Builder(ContextKind.Default, "123")
+ .Set("stats", LdValue.ObjectFrom(new Dictionary
+ {
+ { "power", LdValue.Of(9000) }
+ })).Build();
+ var result = Eval("I can ingest over {{ ldctx.stats.power }} tokens per second!", context, null);
+ Assert.Equal("I can ingest over 9000 tokens per second!", result);
+ }
+}
diff --git a/pkgs/sdk/server-ai/test/LaunchDarkly.ServerSdk.Ai.Tests.csproj b/pkgs/sdk/server-ai/test/LaunchDarkly.ServerSdk.Ai.Tests.csproj
index caeb643e..5af9c2bf 100644
--- a/pkgs/sdk/server-ai/test/LaunchDarkly.ServerSdk.Ai.Tests.csproj
+++ b/pkgs/sdk/server-ai/test/LaunchDarkly.ServerSdk.Ai.Tests.csproj
@@ -10,6 +10,7 @@
LaunchDarkly.ServerSdk.Ai.Tests
false
true
+ 11
LaunchDarkly.Sdk.Server.Ai
@@ -20,7 +21,7 @@
-
+
diff --git a/pkgs/sdk/server-ai/test/LdAiClientTest.cs b/pkgs/sdk/server-ai/test/LdAiClientTest.cs
new file mode 100644
index 00000000..b5c237ce
--- /dev/null
+++ b/pkgs/sdk/server-ai/test/LdAiClientTest.cs
@@ -0,0 +1,138 @@
+using System.Collections.Generic;
+using LaunchDarkly.Logging;
+using LaunchDarkly.Sdk.Server.Ai.Adapters;
+using LaunchDarkly.Sdk.Server.Ai.Config;
+using LaunchDarkly.Sdk.Server.Ai.DataModel;
+using LaunchDarkly.Sdk.Server.Ai.Interfaces;
+using Moq;
+using Xunit;
+
+namespace LaunchDarkly.Sdk.Server.Ai;
+
+public class LdAiClientTest
+{
+ [Fact]
+ public void CanInstantiateWithServerSideClient()
+ {
+ var client = new LdClientAdapter(new LdClient(Configuration.Builder("key").Offline(true).Build()));
+ var aiClient = new LdAiClient(client);
+ var result= aiClient.ModelConfig("foo", Context.New("key"), LdAiConfig.Disabled);
+ Assert.False(result.Config.Enabled);
+ }
+
+ [Fact]
+ public void ThrowsIfClientIsNull()
+ {
+ Assert.Throws(() => new LdAiClient(null));
+ }
+
+ [Fact]
+ public void ReturnsDefaultConfigWhenGivenInvalidVariation()
+ {
+ var mockClient = new Mock();
+
+ var mockLogger = new Mock();
+
+ mockClient.Setup(x =>
+ x.JsonVariation("foo", It.IsAny(), It.IsAny())).Returns(LdValue.Null);
+
+
+ mockClient.Setup(x => x.GetLogger()).Returns(mockLogger.Object);
+
+
+ var client = new LdAiClient(mockClient.Object);
+
+ var defaultConfig = LdAiConfig.New().AddPromptMessage("Hello").Build();
+
+ var tracker = client.ModelConfig("foo", Context.New(ContextKind.Default, "key"), defaultConfig);
+
+ Assert.Equal(defaultConfig, tracker.Config);
+ }
+
+ private const string MetaDisabledExplicitly = """
+ {
+ "_ldMeta": {"versionKey": "1", "enabled": false},
+ "model": {},
+ "prompt": []
+ }
+ """;
+
+ private const string MetaDisabledImplicitly = """
+ {
+ "_ldMeta": {"versionKey": "1"},
+ "model": {},
+ "prompt": []
+ }
+ """;
+
+ private const string MissingMeta = """
+ {
+ "model": {},
+ "prompt": []
+ }
+ """;
+
+ private const string EmptyObject = "{}";
+
+ [Theory]
+ [InlineData(MetaDisabledExplicitly)]
+ [InlineData(MetaDisabledImplicitly)]
+ [InlineData(MissingMeta)]
+ [InlineData(EmptyObject)]
+ public void ConfigNotEnabledReturnsDisabledInstance(string json)
+ {
+ var mockClient = new Mock();
+
+ var mockLogger = new Mock();
+
+ mockClient.Setup(x =>
+ x.JsonVariation("foo", It.IsAny(), It.IsAny())).Returns(LdValue.Parse(json));
+
+ mockClient.Setup(x => x.GetLogger()).Returns(mockLogger.Object);
+
+ var client = new LdAiClient(mockClient.Object);
+
+ // All the JSON inputs here are considered disabled, either due to lack of the 'enabled' property,
+ // or if present, it is set to false. Therefore, if the default was returned, we'd see the assertion fail
+ // (since calling LdAiConfig.New() constructs an enabled config by default.)
+ var tracker = client.ModelConfig("foo", Context.New(ContextKind.Default, "key"),
+ LdAiConfig.New().AddPromptMessage("foo").Build());
+
+ Assert.False(tracker.Config.Enabled);
+ }
+
+ [Fact]
+ public void ConfigEnabledReturnsInstance()
+ {
+ var mockClient = new Mock();
+
+ var mockLogger = new Mock();
+
+ const string json = """
+ {
+ "_ldMeta": {"versionKey": "1", "enabled": true},
+ "model": {},
+ "prompt": [{"content": "Hello!", "role": "system"}]
+ }
+ """;
+
+ mockClient.Setup(x =>
+ x.JsonVariation("foo", It.IsAny(), It.IsAny())).Returns(LdValue.Parse(json));
+
+ mockClient.Setup(x => x.GetLogger()).Returns(mockLogger.Object);
+
+ var context = Context.New(ContextKind.Default, "key");
+ var client = new LdAiClient(mockClient.Object);
+
+ // We shouldn't get this default.
+ var tracker = client.ModelConfig("foo", context,
+ LdAiConfig.New().AddPromptMessage("Goodbye!").Build());
+
+ Assert.Collection(tracker.Config.Prompt,
+ message =>
+ {
+ Assert.Equal("Hello!", message.Content);
+ Assert.Equal(Role.System, message.Role);
+ });
+ }
+}
diff --git a/pkgs/sdk/server-ai/test/LdAiConfigTest.cs b/pkgs/sdk/server-ai/test/LdAiConfigTest.cs
new file mode 100644
index 00000000..1bc9eb97
--- /dev/null
+++ b/pkgs/sdk/server-ai/test/LdAiConfigTest.cs
@@ -0,0 +1,70 @@
+using LaunchDarkly.Sdk.Server.Ai.Config;
+using LaunchDarkly.Sdk.Server.Ai.DataModel;
+using Xunit;
+
+namespace LaunchDarkly.Sdk.Server.Ai;
+
+public class LdAiConfigTest
+{
+ [Fact]
+ public void CanDisableAndEnableConfig()
+ {
+ var config1 = LdAiConfig.New().Enable().Disable().Build();
+ Assert.False(config1.Enabled);
+
+ var config2 = LdAiConfig.New().Disable().Enable().Build();
+ Assert.True(config2.Enabled);
+
+ var config3 = LdAiConfig.New().SetEnabled(true).SetEnabled(false).Build();
+ Assert.False(config3.Enabled);
+
+ var config4 = LdAiConfig.New().SetEnabled(false).SetEnabled(true).Build();
+ Assert.True(config4.Enabled);
+
+ var config5 = LdAiConfig.New().SetEnabled(true).Disable().Build();
+ Assert.False(config5.Enabled);
+
+ var config6 = LdAiConfig.New().SetEnabled(false).Enable().Build();
+ Assert.True(config6.Enabled);
+ }
+
+ [Fact]
+ public void CanAddPromptMessages()
+ {
+ var config = LdAiConfig.New()
+ .AddPromptMessage("Hello")
+ .AddPromptMessage("World", Role.System)
+ .AddPromptMessage("!", Role.Assistant)
+ .Build();
+
+ Assert.Collection(config.Prompt,
+ message =>
+ {
+ Assert.Equal("Hello", message.Content);
+ Assert.Equal(Role.User, message.Role);
+ },
+ message =>
+ {
+ Assert.Equal("World", message.Content);
+ Assert.Equal(Role.System, message.Role);
+ },
+ message =>
+ {
+ Assert.Equal("!", message.Content);
+ Assert.Equal(Role.Assistant, message.Role);
+ });
+ }
+
+
+ [Fact]
+ public void CanSetModelParams()
+ {
+ var config = LdAiConfig.New()
+ .SetModelParam("foo", "bar")
+ .SetModelParam("baz", 42)
+ .Build();
+
+ Assert.Equal("bar", config.Model["foo"]);
+ Assert.Equal(42, config.Model["baz"]);
+ }
+}
diff --git a/pkgs/sdk/server-ai/test/LdAiConfigTrackerTest.cs b/pkgs/sdk/server-ai/test/LdAiConfigTrackerTest.cs
index 2a6a9d7c..a3f629b4 100644
--- a/pkgs/sdk/server-ai/test/LdAiConfigTrackerTest.cs
+++ b/pkgs/sdk/server-ai/test/LdAiConfigTrackerTest.cs
@@ -1,3 +1,9 @@
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using LaunchDarkly.Sdk.Server.Ai.Config;
+using LaunchDarkly.Sdk.Server.Ai.Interfaces;
+using LaunchDarkly.Sdk.Server.Ai.Provider;
+using Moq;
using Xunit;
namespace LaunchDarkly.Sdk.Server.Ai
@@ -5,10 +11,222 @@ namespace LaunchDarkly.Sdk.Server.Ai
public class LdAiTrackerTest
{
[Fact]
- public void CanCallDispose()
+ public void ThrowsIfClientIsNull()
{
- var tracker = new LdAiConfigTracker(null);
- tracker.Dispose();
+ Assert.Throws(() =>
+ new LdAiConfigTracker(null, "key", LdAiConfig.Disabled, Context.New("key")));
+ }
+
+ [Fact]
+ public void ThrowsIfConfigIsNull()
+ {
+ var mockClient = new Mock();
+ Assert.Throws(() =>
+ new LdAiConfigTracker(mockClient.Object, "key", null, Context.New("key")));
+ }
+
+ [Fact]
+ public void ThrowsIfKeyIsNull()
+ {
+ var mockClient = new Mock();
+ Assert.Throws(() =>
+ new LdAiConfigTracker(mockClient.Object, null, LdAiConfig.Disabled, Context.New("key")));
+ }
+
+ [Fact]
+ public void CanTrackDuration()
+ {
+ var mockClient = new Mock();
+ var context = Context.New("key");
+ const string flagKey = "key";
+ var config = LdAiConfig.Disabled;
+ var data = LdValue.ObjectFrom(new Dictionary
+ {
+ { "versionKey", LdValue.Of(config.VersionKey) },
+ { "configKey", LdValue.Of(flagKey) }
+ });
+ var tracker = new LdAiConfigTracker(mockClient.Object, flagKey, config, context);
+
+ tracker.TrackDuration(1.0f);
+ mockClient.Verify(x => x.Track("$ld:ai:duration:total", context, data, 1.0f), Times.Once);
+ }
+
+
+ [Fact]
+ public void CanTrackSuccess()
+ {
+ var mockClient = new Mock();
+ var context = Context.New("key");
+ const string flagKey = "key";
+ var config = LdAiConfig.Disabled;
+ var data = LdValue.ObjectFrom(new Dictionary
+ {
+ { "versionKey", LdValue.Of(config.VersionKey) },
+ { "configKey", LdValue.Of(flagKey) }
+ });
+
+ var tracker = new LdAiConfigTracker(mockClient.Object, flagKey, config, context);
+ tracker.TrackSuccess();
+ mockClient.Verify(x => x.Track("$ld:ai:generation", context, data, 1.0f), Times.Once);
+ }
+
+
+ [Fact]
+ public async Task CanTrackDurationOfTask()
+ {
+ var mockClient = new Mock();
+ var context = Context.New("key");
+ const string flagKey = "key";
+ var config = LdAiConfig.Disabled;
+ var data = LdValue.ObjectFrom(new Dictionary
+ {
+ { "versionKey", LdValue.Of(config.VersionKey) },
+ { "configKey", LdValue.Of(flagKey) }
+ });
+
+ var tracker = new LdAiConfigTracker(mockClient.Object, flagKey, config, context);
+
+
+ const int waitMs = 10;
+
+ var result = await tracker.TrackDurationOfTask(Task.Run(() =>
+ {
+ return Task.Delay(waitMs).ContinueWith(_ => "I waited");
+ }));
+
+ Assert.Equal("I waited", result);
+
+ // The metricValue here is the duration of the task. Since the task waits for 10ms, we'll add a bit of
+ // error so this isn't flaky. If this proves to be really flaky, we can at least constrain it to be
+ // between 0 and some large number.
+ mockClient.Verify(
+ x => x.Track("$ld:ai:duration:total", context, data,
+ It.IsInRange(0, 500, Range.Inclusive)), Times.Once);
+ }
+
+
+ [Fact]
+ public void CanTrackFeedback()
+ {
+ var mockClient = new Mock();
+ var context = Context.New("key");
+ const string flagKey = "key";
+ var config = LdAiConfig.Disabled;
+ var data = LdValue.ObjectFrom(new Dictionary
+ {
+ { "versionKey", LdValue.Of(config.VersionKey) },
+ { "configKey", LdValue.Of(flagKey) }
+ });
+
+ var tracker = new LdAiConfigTracker(mockClient.Object, flagKey, config, context);
+ tracker.TrackFeedback(Feedback.Positive);
+ tracker.TrackFeedback(Feedback.Negative);
+
+ mockClient.Verify(x => x.Track("$ld:ai:feedback:user:positive", context, data, 1.0f), Times.Once);
+ mockClient.Verify(x => x.Track("$ld:ai:feedback:user:negative", context, data, 1.0f), Times.Once);
+ }
+
+ [Fact]
+ public void CanTrackTokens()
+ {
+ var mockClient = new Mock();
+ var context = Context.New("key");
+ const string flagKey = "key";
+ var config = LdAiConfig.Disabled;
+ var data = LdValue.ObjectFrom(new Dictionary
+ {
+ { "versionKey", LdValue.Of(config.VersionKey) },
+ { "configKey", LdValue.Of(flagKey) }
+ });
+
+ var tracker = new LdAiConfigTracker(mockClient.Object, flagKey, config, context);
+
+ var givenUsage = new Usage
+ {
+ Total = 1,
+ Input = 2,
+ Output = 3
+ };
+
+ tracker.TrackTokens(givenUsage);
+ mockClient.Verify(x => x.Track("$ld:ai:tokens:total", context, data, 1.0f), Times.Once);
+ mockClient.Verify(x => x.Track("$ld:ai:tokens:input", context, data, 2.0f), Times.Once);
+ mockClient.Verify(x => x.Track("$ld:ai:tokens:output", context, data, 3.0f), Times.Once);
+ }
+
+ [Fact]
+ public async Task CanTrackResponseWithSpecificLatency()
+ {
+ var mockClient = new Mock();
+ var context = Context.New("key");
+ const string flagKey = "key";
+ var config = LdAiConfig.Disabled;
+ var data = LdValue.ObjectFrom(new Dictionary
+ {
+ { "versionKey", LdValue.Of(config.VersionKey) },
+ { "configKey", LdValue.Of(flagKey) }
+ });
+
+ var tracker = new LdAiConfigTracker(mockClient.Object, flagKey, config, context);
+
+ var givenUsage = new Usage
+ {
+ Total = 1,
+ Input = 2,
+ Output = 3
+ };
+
+ var givenStatistics = new Metrics
+ {
+ LatencyMs = 500
+ };
+
+ var givenResponse = new Response
+ {
+ Usage = givenUsage,
+ Metrics = givenStatistics
+ };
+
+ var result = await tracker.TrackRequest(Task.Run(() => givenResponse));
+ Assert.Equal(givenResponse, result);
+ mockClient.Verify(x => x.Track("$ld:ai:tokens:total", context, data, 1.0f), Times.Once);
+ mockClient.Verify(x => x.Track("$ld:ai:tokens:input", context, data, 2.0f), Times.Once);
+ mockClient.Verify(x => x.Track("$ld:ai:tokens:output", context, data, 3.0f), Times.Once);
+ mockClient.Verify(x => x.Track("$ld:ai:duration:total", context, data, 500.0f), Times.Once);
+ }
+
+ [Fact]
+ public async Task CanTrackResponseWithPartialData()
+ {
+ var mockClient = new Mock();
+ var context = Context.New("key");
+ const string flagKey = "key";
+ var config = LdAiConfig.Disabled;
+ var data = LdValue.ObjectFrom(new Dictionary
+ {
+ { "versionKey", LdValue.Of(config.VersionKey) },
+ { "configKey", LdValue.Of(flagKey) }
+ });
+
+ var tracker = new LdAiConfigTracker(mockClient.Object, flagKey, config, context);
+
+ var givenUsage = new Usage
+ {
+ Total = 1
+ };
+
+ var givenResponse = new Response
+ {
+ Usage = givenUsage,
+ Metrics = null
+ };
+
+ var result = await tracker.TrackRequest(Task.Run(() => givenResponse));
+ Assert.Equal(givenResponse, result);
+ mockClient.Verify(x => x.Track("$ld:ai:tokens:total", context, data, 1.0f), Times.Once);
+
+ // If latency isn't provided via Metrics, then it is automatically measured.
+ mockClient.Verify(x => x.Track("$ld:ai:duration:total", context, data, It.IsAny()), Times.Once);
}
}
}