From 2b3cc4bb7d20f1ce01f60c57d4ac09f3fabca542 Mon Sep 17 00:00:00 2001 From: "Eric St. John" Date: Thu, 19 Mar 2026 22:55:42 -0700 Subject: [PATCH 01/10] Add video generation support to Microsoft.Extensions.AI MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Introduces `IVideoGenerator`, a new modality abstraction for video generation that follows the existing patterns established by `IChatClient` and `IImageGenerator`. ## Abstractions (Microsoft.Extensions.AI.Abstractions) - `IVideoGenerator` interface with `GenerateAsync` accepting request, options, progress, and cancellation - `VideoGenerationRequest` with `Prompt` and `OriginalMedia` (provider-neutral; image content = reference for generation, video content = source for editing) - `VideoGenerationOptions` with `Count`, `Duration`, `FramesPerSecond`, `MediaType`, `ModelId`, `VideoSize`, `ResponseFormat`, `RawRepresentationFactory`, and `AdditionalProperties` - `VideoGenerationResponse` with `Contents`, `Usage`, `ModelId`, `RawRepresentation`, and `AdditionalProperties` - `VideoGenerationProgress` for reporting async job status and percent complete via `IProgress` - `VideoGenerationResponseFormat` enum (`Uri`, `Data`, `Hosted`) - `VideoGeneratorMetadata` for provider name, endpoint, and default model - `DelegatingVideoGenerator` base class for middleware - `VideoGeneratorExtensions` with `GenerateAsync`/`EditVideoAsync`/`EditVideosAsync` convenience overloads - `HostedVideoGenerationTool` and supporting tool content types for chat-client-driven video generation ## Middleware (Microsoft.Extensions.AI) - `VideoGeneratorBuilder` with DI integration (`AddVideoGenerator` on `IServiceCollection`) - `LoggingVideoGenerator` middleware with builder extension - `OpenTelemetryVideoGenerator` middleware with builder extension - `ConfigureOptionsVideoGenerator` middleware with builder extension - `VideoGeneratingChatClient` that bridges `IChatClient` tool calls to `IVideoGenerator` ## 
OpenAI Provider (Microsoft.Extensions.AI.OpenAI) - `OpenAIVideoGenerator` implementing `IVideoGenerator` via `VideoClient` - `AsIVideoGenerator` extension method on `VideoClient` - Routing based on request contents and `AdditionalProperties` keys: - **Text-to-video**: `POST /videos` (via SDK `CreateVideoAsync`) - **Image-to-video**: `POST /videos` with `input_reference` — URL in JSON or image bytes via multipart (via SDK `CreateVideoAsync`) - **Edit by video ID**: `POST /videos/edits` with `edit_video_id` key (via raw `ClientPipeline`) - **Edit by upload**: `POST /videos/edits` with `video/*` `OriginalMedia` as multipart (via raw `ClientPipeline`) - **Extend**: `POST /videos/extensions` with `extend_video_id` key (via raw `ClientPipeline`) - **Characters**: `characters` array forwarded in `POST /videos` body via `AdditionalProperties` - Edit, extend, and character endpoints not yet in the OpenAI SDK are supported by constructing `PipelineMessage` directly against `VideoClient.Pipeline`/`VideoClient.Endpoint` - Async create → poll → download pattern with `IProgress` reporting ## Tests - Abstraction unit tests: `DelegatingVideoGeneratorTests`, `VideoGenerationOptionsTests`, `VideoGenerationResponseTests`, `VideoGeneratorExtensionsTests`, `VideoGeneratorMetadataTests`, `VideoGeneratorTests` - Middleware tests: `ConfigureOptionsVideoGeneratorTests`, `LoggingVideoGeneratorTests`, `OpenTelemetryVideoGeneratorTests`, `VideoGeneratorBuilderTests`, `VideoGeneratorDependencyInjectionPatterns` - OpenAI tests: `OpenAIVideoGeneratorTests`, `OpenAIVideoGeneratorIntegrationTests` - Shared `TestVideoGenerator` and `VideoGeneratorIntegrationTests` base class ## POC Sample - `samples/VideoGenerationPOC` demonstrating all scenarios with `System.CommandLine` - Uses `DataContent.LoadFromAsync` for file loading with automatic media type inference - Uses `DataContent.SaveToAsync` for output - CLI args: `--input`, `--edit`, `--extend`, `--character`, `--model`, `--output` --- 
samples/VideoGenerationPOC/Program.cs | 218 ++++++++ .../VideoGenerationPOC.csproj | 18 + .../Video/DelegatingVideoGenerator.cs | 70 +++ .../Video/HostedVideoGenerationTool.cs | 45 ++ .../Video/IVideoGenerator.cs | 39 ++ .../Video/VideoGenerationOptions.cs | 136 +++++ .../Video/VideoGenerationProgress.cs | 64 +++ .../Video/VideoGenerationRequest.cs | 67 +++ .../Video/VideoGenerationResponse.cs | 53 ++ .../Video/VideoGenerationToolCallContent.cs | 23 + .../Video/VideoGenerationToolResultContent.cs | 37 ++ .../Video/VideoGeneratorExtensions.cs | 205 +++++++ .../Video/VideoGeneratorMetadata.cs | 44 ++ .../OpenAIClientExtensions.cs | 55 ++ .../OpenAIVideoGenerator.cs | 488 +++++++++++++++++ .../OpenTelemetryVideoGenerator.cs | 324 +++++++++++ ...elemetryVideoGeneratorBuilderExtensions.cs | 43 ++ .../VideoGeneratingChatClient.cs | 506 ++++++++++++++++++ ...eoGeneratingChatClientBuilderExtensions.cs | 47 ++ .../OpenTelemetryConsts.cs | 1 + .../Video/ConfigureOptionsVideoGenerator.cs | 54 ++ ...eOptionsVideoGeneratorBuilderExtensions.cs | 38 ++ .../Video/LoggingVideoGenerator.cs | 124 +++++ .../LoggingVideoGeneratorBuilderExtensions.cs | 58 ++ .../Video/VideoGeneratorBuilder.cs | 86 +++ ...ratorBuilderServiceCollectionExtensions.cs | 85 +++ ...eneratorBuilderVideoGeneratorExtensions.cs | 29 + src/Shared/DiagnosticIds/DiagnosticIds.cs | 2 + .../TestVideoGenerator.cs | 47 ++ .../Video/DelegatingVideoGeneratorTests.cs | 100 ++++ .../Video/VideoGenerationOptionsTests.cs | 132 +++++ .../Video/VideoGenerationResponseTests.cs | 79 +++ .../Video/VideoGeneratorExtensionsTests.cs | 167 ++++++ .../Video/VideoGeneratorMetadataTests.cs | 29 + .../Video/VideoGeneratorTests.cs | 134 +++++ .../VideoGeneratorIntegrationTests.cs | 99 ++++ .../OpenAIVideoGeneratorIntegrationTests.cs | 14 + .../OpenAIVideoGeneratorTests.cs | 47 ++ .../Microsoft.Extensions.AI.Tests.csproj | 1 + .../ConfigureOptionsVideoGeneratorTests.cs | 72 +++ .../Video/LoggingVideoGeneratorTests.cs | 142 +++++ 
.../Video/OpenTelemetryVideoGeneratorTests.cs | 218 ++++++++ .../SingletonVideoGeneratorExtensions.cs | 11 + .../Video/VideoGeneratorBuilderTests.cs | 103 ++++ ...deoGeneratorDependencyInjectionPatterns.cs | 178 ++++++ 45 files changed, 4532 insertions(+) create mode 100644 samples/VideoGenerationPOC/Program.cs create mode 100644 samples/VideoGenerationPOC/VideoGenerationPOC.csproj create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/DelegatingVideoGenerator.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/HostedVideoGenerationTool.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/IVideoGenerator.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOptions.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationProgress.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolCallContent.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolResultContent.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorMetadata.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGeneratorBuilderExtensions.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClient.cs create mode 100644 
src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClientBuilderExtensions.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGenerator.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGeneratorBuilderExtensions.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/Video/LoggingVideoGenerator.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/Video/LoggingVideoGeneratorBuilderExtensions.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilder.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderServiceCollectionExtensions.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderVideoGeneratorExtensions.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerator.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/DelegatingVideoGeneratorTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationOptionsTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationResponseTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorMetadataTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VideoGeneratorIntegrationTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorIntegrationTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs create mode 100644 
test/Libraries/Microsoft.Extensions.AI.Tests/Video/ConfigureOptionsVideoGeneratorTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Video/SingletonVideoGeneratorExtensions.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Video/VideoGeneratorBuilderTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Video/VideoGeneratorDependencyInjectionPatterns.cs diff --git a/samples/VideoGenerationPOC/Program.cs b/samples/VideoGenerationPOC/Program.cs new file mode 100644 index 00000000000..944cea59bde --- /dev/null +++ b/samples/VideoGenerationPOC/Program.cs @@ -0,0 +1,218 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +// Video Generation POC - Microsoft.Extensions.AI +// Usage: set OPENAI_API_KEY environment variable, then run: +// dotnet run -- "A cat playing piano" +// dotnet run -- "She turns and smiles" --input reference.jpg +// dotnet run -- "Change the sky to sunset" --edit video_abc123 +// dotnet run -- "Continue the scene" --extend video_abc123 +// dotnet run -- "A tracking shot of Mossy" --character char_abc123 + +using System.CommandLine; +using System.Drawing; +using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.Logging; +using OpenAI; + +var promptArg = new Argument("prompt", () => "A serene lake at sunset with gentle ripples", "Text prompt describing the video to generate."); +var modelOption = new Option("--model", () => "sora-2", "Model ID to use for video generation."); +var outputOption = new Option("--output", () => $"video_{DateTime.Now:yyyyMMdd_HHmmss}.mp4", "Output file path for the generated video."); +var inputOption = new Option("--input", "Input file(s) — 
images for image-to-video, or a video for editing.") { AllowMultipleArgumentsPerToken = true }; +var editOption = new Option("--edit", "Video ID of an existing generation to edit (POST /videos/edits)."); +var extendOption = new Option("--extend", "Video ID of a completed video to extend (POST /videos/extensions)."); +var characterOption = new Option("--character", "Character ID(s) to include in the generation.") { AllowMultipleArgumentsPerToken = true }; + +var rootCommand = new RootCommand("Video Generation POC — demonstrates Microsoft.Extensions.AI video generation with OpenAI.") +{ + promptArg, + modelOption, + outputOption, + inputOption, + editOption, + extendOption, + characterOption, +}; + +rootCommand.SetHandler(async (context) => +{ + string prompt = context.ParseResult.GetValueForArgument(promptArg); + string model = context.ParseResult.GetValueForOption(modelOption)!; + string outputPath = context.ParseResult.GetValueForOption(outputOption)!; + string[] inputPaths = context.ParseResult.GetValueForOption(inputOption) ?? []; + string? editVideoId = context.ParseResult.GetValueForOption(editOption); + string? extendVideoId = context.ParseResult.GetValueForOption(extendOption); + string[] characterIds = context.ParseResult.GetValueForOption(characterOption) ?? []; + + // --- API key --- + string? 
apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY"); + if (string.IsNullOrEmpty(apiKey)) + { + Console.Error.WriteLine("Error: Set the OPENAI_API_KEY environment variable."); + context.ExitCode = 1; + return; + } + + Console.WriteLine($"Prompt: {prompt}"); + Console.WriteLine($"Model: {model}"); + Console.WriteLine($"Output: {outputPath}"); + if (inputPaths.Length > 0) + { + Console.WriteLine($"Inputs: {string.Join(", ", inputPaths)}"); + } + + if (editVideoId is not null) + { + Console.WriteLine($"Edit: {editVideoId}"); + } + + if (extendVideoId is not null) + { + Console.WriteLine($"Extend: {extendVideoId}"); + } + + if (characterIds.Length > 0) + { + Console.WriteLine($"Characters: {string.Join(", ", characterIds)}"); + } + + Console.WriteLine(); + + // --- Create the video generator with middleware pipeline --- + using var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole().SetMinimumLevel(LogLevel.Debug)); + + var openAIClient = new OpenAIClient(apiKey); + using IVideoGenerator generator = openAIClient + .GetVideoClient() + .AsIVideoGenerator(model) + .AsBuilder() + .UseLogging(loggerFactory) + .UseOpenTelemetry(loggerFactory) + .ConfigureOptions(options => + { + options.Count ??= 1; + options.Duration ??= TimeSpan.FromSeconds(12); + options.VideoSize ??= new Size(1280, 720); + }) + .Build(); + + // --- Show metadata --- + var metadata = generator.GetService(); + if (metadata is not null) + { + Console.WriteLine($"Provider: {metadata.ProviderName}"); + Console.WriteLine($"Endpoint: {metadata.ProviderUri}"); + Console.WriteLine($"Default Model: {metadata.DefaultModelId}"); + Console.WriteLine(); + } + + // --- Build request --- + List? 
originalMedia = null; + if (inputPaths.Length > 0) + { + originalMedia = []; + foreach (string inputPath in inputPaths) + { + if (!File.Exists(inputPath)) + { + Console.Error.WriteLine($"Error: Input file not found: {inputPath}"); + context.ExitCode = 1; + return; + } + + DataContent loaded = await DataContent.LoadFromAsync(inputPath); + originalMedia.Add(loaded); + Console.WriteLine($" Loaded input: {inputPath} ({loaded.MediaType}, {loaded.Data.Length} bytes)"); + } + + Console.WriteLine(); + } + + // --- Generate video --- + string mode = + extendVideoId is not null ? "Extending" : + editVideoId is not null ? "Editing (by video ID)" : + originalMedia?.Exists(c => c is DataContent dc && dc.HasTopLevelMediaType("video")) == true ? "Editing (uploaded video)" : + originalMedia is not null ? "Generating (image-to-video)" : + "Generating (text-to-video)"; + Console.WriteLine($"{mode}..."); + var stopwatch = System.Diagnostics.Stopwatch.StartNew(); + + var generateOptions = new VideoGenerationOptions + { + ResponseFormat = VideoGenerationResponseFormat.Data, + }; + + if (editVideoId is not null) + { + generateOptions.AdditionalProperties ??= []; + generateOptions.AdditionalProperties["edit_video_id"] = editVideoId; + } + + if (extendVideoId is not null) + { + generateOptions.AdditionalProperties ??= []; + generateOptions.AdditionalProperties["extend_video_id"] = extendVideoId; + } + + if (characterIds.Length > 0) + { + var chars = new JsonArray(); + foreach (string charId in characterIds) + { + chars.Add(new JsonObject { ["id"] = charId }); + } + + generateOptions.AdditionalProperties ??= []; + generateOptions.AdditionalProperties["characters"] = chars; + } + + var response = await generator.GenerateAsync( + new VideoGenerationRequest(prompt, originalMedia), + generateOptions, + new Progress(p => + Console.WriteLine($" Status: {p.Status}{(p.PercentComplete.HasValue ? 
$" ({p.PercentComplete}%)" : string.Empty)}"))); + + stopwatch.Stop(); + Console.WriteLine($"Completed in {stopwatch.Elapsed.TotalSeconds:F1}s"); + Console.WriteLine(); + + // --- Process response --- + if (response.Usage is { } usage) + { + Console.WriteLine($"Token Usage: input={usage.InputTokenCount}, output={usage.OutputTokenCount}, total={usage.TotalTokenCount}"); + } + + Console.WriteLine($"Generated {response.Contents.Count} content item(s):"); + for (int i = 0; i < response.Contents.Count; i++) + { + var content = response.Contents[i]; + switch (content) + { + case DataContent dc: + string filePath = response.Contents.Count == 1 + ? outputPath + : Path.Combine( + Path.GetDirectoryName(outputPath) ?? ".", + $"{Path.GetFileNameWithoutExtension(outputPath)}_{i}{Path.GetExtension(outputPath)}"); + + await dc.SaveToAsync(filePath); + Console.WriteLine($" [{i}] Saved {dc.Data.Length} bytes ({dc.MediaType}) -> {filePath}"); + break; + + case UriContent uc: + Console.WriteLine($" [{i}] URI: {uc.Uri} ({uc.MediaType})"); + break; + + default: + Console.WriteLine($" [{i}] {content.GetType().Name}: {content}"); + break; + } + } + + Console.WriteLine(); + Console.WriteLine("Done!"); +}); + +return await rootCommand.InvokeAsync(args); diff --git a/samples/VideoGenerationPOC/VideoGenerationPOC.csproj b/samples/VideoGenerationPOC/VideoGenerationPOC.csproj new file mode 100644 index 00000000000..2572c21a941 --- /dev/null +++ b/samples/VideoGenerationPOC/VideoGenerationPOC.csproj @@ -0,0 +1,18 @@ + + + + Exe + net10.0 + enable + enable + $(NoWarn);MEAI001;OPENAI001 + + + + + + + + + + diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/DelegatingVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/DelegatingVideoGenerator.cs new file mode 100644 index 00000000000..7dbd8330cfc --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/DelegatingVideoGenerator.cs @@ -0,0 +1,70 @@ +// Licensed to the .NET Foundation 
under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; + +namespace Microsoft.Extensions.AI; + +/// +/// Provides an optional base class for an that passes through calls to another instance. +/// +/// +/// This is recommended as a base type when building generators that can be chained in any order around an underlying . +/// The default implementation simply passes each call to the inner generator instance. +/// +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public class DelegatingVideoGenerator : IVideoGenerator +{ + /// + /// Initializes a new instance of the class. + /// + /// The wrapped generator instance. + /// is . + protected DelegatingVideoGenerator(IVideoGenerator innerGenerator) + { + InnerGenerator = Throw.IfNull(innerGenerator); + } + + /// + public void Dispose() + { + Dispose(disposing: true); + GC.SuppressFinalize(this); + } + + /// Gets the inner . + protected IVideoGenerator InnerGenerator { get; } + + /// + public virtual Task GenerateAsync( + VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress? progress = null, CancellationToken cancellationToken = default) + { + return InnerGenerator.GenerateAsync(request, options, progress, cancellationToken); + } + + /// + public virtual object? GetService(Type serviceType, object? serviceKey = null) + { + _ = Throw.IfNull(serviceType); + + // If the key is non-null, we don't know what it means so pass through to the inner service. + return + serviceKey is null && serviceType.IsInstanceOfType(this) ? this : + InnerGenerator.GetService(serviceType, serviceKey); + } + + /// Provides a mechanism for releasing unmanaged resources. + /// if being called from ; otherwise, . 
+ protected virtual void Dispose(bool disposing) + { + if (disposing) + { + InnerGenerator.Dispose(); + } + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/HostedVideoGenerationTool.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/HostedVideoGenerationTool.cs new file mode 100644 index 00000000000..910814f5c1e --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/HostedVideoGenerationTool.cs @@ -0,0 +1,45 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// Represents a hosted tool that can be specified to an AI service to enable it to perform video generation. +/// +/// This tool does not itself implement video generation. It is a marker that can be used to inform a service +/// that the service is allowed to perform video generation if the service is capable of doing so. +/// +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public class HostedVideoGenerationTool : AITool +{ + /// Any additional properties associated with the tool. + private IReadOnlyDictionary? _additionalProperties; + + /// + /// Initializes a new instance of the class with the specified options. + /// + public HostedVideoGenerationTool() + { + } + + /// Initializes a new instance of the class. + /// Any additional properties associated with the tool. + public HostedVideoGenerationTool(IReadOnlyDictionary? additionalProperties) + { + _additionalProperties = additionalProperties; + } + + /// + public override string Name => "video_generation"; + + /// + public override IReadOnlyDictionary AdditionalProperties => _additionalProperties ?? base.AdditionalProperties; + + /// + /// Gets or sets the options used to configure video generation. 
+ /// + public VideoGenerationOptions? Options { get; set; } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/IVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/IVideoGenerator.cs new file mode 100644 index 00000000000..a1b146316e7 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/IVideoGenerator.cs @@ -0,0 +1,39 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// +/// Represents a generator of videos. +/// +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public interface IVideoGenerator : IDisposable +{ + /// + /// Sends a video generation request and returns the generated video as a . + /// + /// The video generation request containing the prompt and optional original videos for editing. + /// The video generation options to configure the request. + /// An optional to receive progress updates during the generation process. + /// The to monitor for cancellation requests. The default is . + /// is . + /// The videos generated by the . + Task GenerateAsync(VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress? progress = null, CancellationToken cancellationToken = default); + + /// Asks the for an object of the specified type . + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object, otherwise . + /// is . + /// + /// The purpose of this method is to allow for the retrieval of strongly typed services that might be provided by the , + /// including itself or any services it might be wrapping. + /// + object? GetService(Type serviceType, object? 
serviceKey = null); +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOptions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOptions.cs new file mode 100644 index 00000000000..0375c856b03 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOptions.cs @@ -0,0 +1,136 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Drawing; +using System.Text.Json.Serialization; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// Represents the options for a video generation request. +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public class VideoGenerationOptions +{ + /// Initializes a new instance of the class. + public VideoGenerationOptions() + { + } + + /// Initializes a new instance of the class, performing a shallow copy of all properties from . + protected VideoGenerationOptions(VideoGenerationOptions? other) + { + if (other is null) + { + return; + } + + AdditionalProperties = other.AdditionalProperties?.Clone(); + Count = other.Count; + Duration = other.Duration; + FramesPerSecond = other.FramesPerSecond; + MediaType = other.MediaType; + ModelId = other.ModelId; + RawRepresentationFactory = other.RawRepresentationFactory; + ResponseFormat = other.ResponseFormat; + VideoSize = other.VideoSize; + } + + /// + /// Gets or sets the number of videos to generate. + /// + public int? Count { get; set; } + + /// + /// Gets or sets the desired duration for the generated video. + /// + /// + /// If a provider only supports fixed durations, the closest supported duration is used. + /// + public TimeSpan? Duration { get; set; } + + /// + /// Gets or sets the desired frames per second for the generated video. + /// + public int? 
FramesPerSecond { get; set; } + + /// + /// Gets or sets the media type (also known as MIME type) of the generated video. + /// + public string? MediaType { get; set; } + + /// + /// Gets or sets the model ID to use for video generation. + /// + public string? ModelId { get; set; } + + /// + /// Gets or sets a callback responsible for creating the raw representation of the video generation options from an underlying implementation. + /// + /// + /// The underlying implementation can have its own representation of options. + /// When is invoked with a , + /// that implementation can convert the provided options into its own representation in order to use it while performing + /// the operation. For situations where a consumer knows which concrete is being used + /// and how it represents options, a new instance of that implementation-specific options type can be returned by this + /// callback for the implementation to use instead of creating a new instance. + /// Such implementations might mutate the supplied options instance further based on other settings supplied on this + /// instance or from other inputs, therefore, it is strongly recommended to not + /// return shared instances and instead make the callback return a new instance on each call. + /// This is typically used to set an implementation-specific setting that isn't otherwise exposed from the strongly typed + /// properties on . + /// + [JsonIgnore] + public Func? RawRepresentationFactory { get; set; } + + /// + /// Gets or sets the response format of the generated video. + /// + public VideoGenerationResponseFormat? ResponseFormat { get; set; } + + /// + /// Gets or sets the size (resolution) of the generated video. + /// + /// + /// If a provider only supports fixed sizes, the closest supported size is used. + /// + public Size? VideoSize { get; set; } + + /// Gets or sets any additional properties associated with the options. 
+ /// + /// This dictionary can be used to pass provider-specific settings that are not covered by + /// the strongly typed properties on this class. Refer to provider documentation for supported keys. + /// Unknown keys are typically forwarded as-is to the provider's API request body. + /// + public AdditionalPropertiesDictionary? AdditionalProperties { get; set; } + + /// Produces a clone of the current instance. + /// A clone of the current instance. + public virtual VideoGenerationOptions Clone() => new(this); +} + +/// +/// Represents the requested response format of the generated video. +/// +/// +/// Not all implementations support all response formats and this value might be ignored by the implementation if not supported. +/// +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public enum VideoGenerationResponseFormat +{ + /// + /// The generated video is returned as a URI pointing to the video resource. + /// + Uri, + + /// + /// The generated video is returned as in-memory video data. + /// + Data, + + /// + /// The generated video is returned as a hosted resource identifier, which can be used to retrieve the video later. + /// + Hosted, +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationProgress.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationProgress.cs new file mode 100644 index 00000000000..afdc405fc63 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationProgress.cs @@ -0,0 +1,64 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// +/// Represents the progress of a video generation operation. 
+/// +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public readonly struct VideoGenerationProgress : IEquatable +{ + /// Initializes a new instance of the struct. + /// The current status of the video generation (e.g. "queued", "in_progress", "completed"). + /// The completion percentage, from 0 to 100, or if not available. + public VideoGenerationProgress(string? status, int? percentComplete) + { + Status = status; + PercentComplete = percentComplete; + } + + /// + /// Gets the current status of the video generation (e.g. "queued", "in_progress", "completed", "failed"). + /// + public string? Status { get; } + + /// + /// Gets the completion percentage, from 0 to 100, or if not available. + /// + public int? PercentComplete { get; } + + /// Determines whether two instances are equal. + public static bool operator ==(VideoGenerationProgress left, VideoGenerationProgress right) + { + return left.Equals(right); + } + + /// Determines whether two instances are not equal. + public static bool operator !=(VideoGenerationProgress left, VideoGenerationProgress right) + { + return !left.Equals(right); + } + + /// + public bool Equals(VideoGenerationProgress other) => + string.Equals(Status, other.Status, StringComparison.Ordinal) && PercentComplete == other.PercentComplete; + + /// + public override bool Equals(object? obj) => obj is VideoGenerationProgress other && Equals(other); + + /// + public override int GetHashCode() + { +#if NET + return HashCode.Combine(Status, PercentComplete); +#else + int hash = Status?.GetHashCode() ?? 
0; + return (hash * 397) ^ PercentComplete.GetHashCode(); +#endif + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs new file mode 100644 index 00000000000..d5ae3f4ff02 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs @@ -0,0 +1,67 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// Represents a request for video generation. +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public class VideoGenerationRequest +{ + /// Initializes a new instance of the class. + public VideoGenerationRequest() + { + } + + /// Initializes a new instance of the class. + /// The prompt to guide the video generation. + public VideoGenerationRequest(string prompt) + { + Prompt = prompt; + } + + /// Initializes a new instance of the class. + /// The prompt to guide the video generation. + /// The original media (images or videos) to base edits on. + public VideoGenerationRequest(string prompt, IEnumerable? originalMedia) + { + Prompt = prompt; + OriginalMedia = originalMedia; + } + + /// Gets or sets the prompt to guide the video generation. + public string? Prompt { get; set; } + + /// + /// Gets or sets the original media (images or videos) to use as input for the video generation. + /// + /// + /// + /// The interpretation of this property depends on the content type of the media and the capabilities + /// of the underlying provider. Common behaviors include: + /// + /// + /// + /// Image content (e.g., image/png, image/jpeg): Used as a reference image to guide new video + /// generation. 
The provider creates a video inspired by or based on the image. Supported by most providers. + /// + /// + /// Video content (e.g., video/mp4): Used as a source video for editing or remixing. The provider + /// modifies the existing video according to the . Not all providers support video editing. + /// + /// + /// + /// If this property is or empty, the request is treated as a text-to-video generation + /// using only the . + /// + /// + /// Provider-specific scenarios such as video continuations or character consistency can be controlled + /// via . Refer to the provider documentation + /// for supported keys. + /// + /// + public IEnumerable? OriginalMedia { get; set; } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs new file mode 100644 index 00000000000..471888ad4fb --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs @@ -0,0 +1,53 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// Represents the result of a video generation request. +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public class VideoGenerationResponse +{ + /// Initializes a new instance of the class. + [JsonConstructor] + public VideoGenerationResponse() + { + } + + /// Initializes a new instance of the class. + /// The contents for this response. + public VideoGenerationResponse(IList? contents) + { + Contents = contents; + } + + /// Gets or sets the raw representation of the video generation response from an underlying implementation. 
+ /// + /// If a is created to represent some underlying object from another object + /// model, this property can be used to store that original object. This can be useful for debugging or + /// for enabling a consumer to access the underlying object model if needed. + /// + [JsonIgnore] + public object? RawRepresentation { get; set; } + + /// + /// Gets or sets the generated content items. + /// + /// + /// Content is typically for videos streamed from the generator, or for remotely hosted videos, but + /// can also be provider-specific content types that represent the generated videos. + /// + [AllowNull] + public IList Contents + { + get => field ??= []; + set; + } + + /// Gets or sets usage details for the video generation response. + public UsageDetails? Usage { get; set; } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolCallContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolCallContent.cs new file mode 100644 index 00000000000..36b0fdd4f04 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolCallContent.cs @@ -0,0 +1,23 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// +/// Represents the invocation of a video generation tool call by a hosted service. +/// +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public sealed class VideoGenerationToolCallContent : ToolCallContent +{ + /// + /// Initializes a new instance of the class. + /// + /// The tool call ID. 
+    public VideoGenerationToolCallContent(string callId)
+        : base(callId)
+    {
+    }
+}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolResultContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolResultContent.cs
new file mode 100644
index 00000000000..3679d7de174
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolResultContent.cs
@@ -0,0 +1,37 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using Microsoft.Shared.DiagnosticIds;
+
+namespace Microsoft.Extensions.AI;
+
+/// <summary>
+/// Represents the result of a video generation tool call invocation by a hosted service.
+/// </summary>
+/// <remarks>
+/// This content type carries the output produced when a hosted AI service invokes a video generation tool.
+/// It represents the result of the call; the invocation itself is represented by <see cref="VideoGenerationToolCallContent"/>.
+/// </remarks>
+[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)]
+public sealed class VideoGenerationToolResultContent : ToolResultContent
+{
+    /// <summary>
+    /// Initializes a new instance of the <see cref="VideoGenerationToolResultContent"/> class.
+    /// </summary>
+    /// <param name="callId">The tool call ID.</param>
+    public VideoGenerationToolResultContent(string callId)
+        : base(callId)
+    {
+    }
+
+    /// <summary>
+    /// Gets or sets the generated content items.
+    /// </summary>
+    /// <remarks>
+    /// Content is typically <see cref="DataContent"/> for videos streamed from the tool, or <see cref="UriContent"/> for remotely hosted videos, but
+    /// can also be provider-specific content types that represent the generated videos.
+    /// </remarks>
+    public IList<AIContent>?
Outputs { get; set; } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs new file mode 100644 index 00000000000..bf310f75424 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs @@ -0,0 +1,205 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Mime; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; + +namespace Microsoft.Extensions.AI; + +/// Provides extension methods for . +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public static class VideoGeneratorExtensions +{ + /// Asks the for an object of type . + /// The type of the object to be retrieved. + /// The generator. + /// An optional key that can be used to help identify the target service. + /// The found object, otherwise . + /// is . + /// + /// The purpose of this method is to allow for the retrieval of strongly typed services that may be provided by the , + /// including itself or any services it might be wrapping. + /// + public static TService? GetService(this IVideoGenerator generator, object? serviceKey = null) + { + _ = Throw.IfNull(generator); + + return generator.GetService(typeof(TService), serviceKey) is TService service ? service : default; + } + + /// + /// Asks the for an object of the specified type + /// and throws an exception if one isn't available. + /// + /// The generator. + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object. + /// is . + /// is . 
+ /// No service of the requested type for the specified key is available. + /// + /// The purpose of this method is to allow for the retrieval of services that are required to be provided by the , + /// including itself or any services it might be wrapping. + /// + public static object GetRequiredService(this IVideoGenerator generator, Type serviceType, object? serviceKey = null) + { + _ = Throw.IfNull(generator); + _ = Throw.IfNull(serviceType); + + return + generator.GetService(serviceType, serviceKey) ?? + throw Throw.CreateMissingServiceException(serviceType, serviceKey); + } + + /// + /// Asks the for an object of type + /// and throws an exception if one isn't available. + /// + /// The type of the object to be retrieved. + /// The generator. + /// An optional key that can be used to help identify the target service. + /// The found object. + /// is . + /// No service of the requested type for the specified key is available. + /// + /// The purpose of this method is to allow for the retrieval of strongly typed services that are required to be provided by the , + /// including itself or any services it might be wrapping. + /// + public static TService GetRequiredService(this IVideoGenerator generator, object? serviceKey = null) + { + _ = Throw.IfNull(generator); + + if (generator.GetService(typeof(TService), serviceKey) is not TService service) + { + throw Throw.CreateMissingServiceException(typeof(TService), serviceKey); + } + + return service; + } + + /// + /// Generates videos based on a text prompt. + /// + /// The video generator. + /// The prompt to guide the video generation. + /// The video generation options to configure the request. + /// An optional to receive progress updates during the generation process. + /// The to monitor for cancellation requests. The default is . + /// or is . + /// The videos generated by the generator. + public static Task GenerateVideosAsync( + this IVideoGenerator generator, + string prompt, + VideoGenerationOptions? 
options = null, + IProgress? progress = null, + CancellationToken cancellationToken = default) + { + _ = Throw.IfNull(generator); + _ = Throw.IfNull(prompt); + + return generator.GenerateAsync(new VideoGenerationRequest(prompt), options, progress, cancellationToken); + } + + /// + /// Generates or edits videos using original media and a text prompt. + /// + /// The video generator. + /// The original media (images or videos) to use as input. + /// The prompt to guide the video generation or editing. + /// The video generation options to configure the request. + /// An optional to receive progress updates during the generation process. + /// The to monitor for cancellation requests. The default is . + /// , , or is . + /// The videos generated by the generator. + public static Task EditVideosAsync( + this IVideoGenerator generator, + IEnumerable originalMedia, + string prompt, + VideoGenerationOptions? options = null, + IProgress? progress = null, + CancellationToken cancellationToken = default) + { + _ = Throw.IfNull(generator); + _ = Throw.IfNull(originalMedia); + _ = Throw.IfNull(prompt); + + return generator.GenerateAsync(new VideoGenerationRequest(prompt, originalMedia), options, progress, cancellationToken); + } + + /// + /// Generates or edits a video using the original video and the specified prompt. + /// + /// The video generator. + /// The single video to use as input. + /// The prompt to guide the video generation or editing. + /// The video generation options to configure the request. + /// An optional to receive progress updates during the generation process. + /// The to monitor for cancellation requests. The default is . + /// , , or is . + /// The videos generated by the generator. + public static Task EditVideoAsync( + this IVideoGenerator generator, + DataContent originalVideo, + string prompt, + VideoGenerationOptions? options = null, + IProgress? 
progress = null, + CancellationToken cancellationToken = default) + { + _ = Throw.IfNull(generator); + _ = Throw.IfNull(originalVideo); + _ = Throw.IfNull(prompt); + + return generator.GenerateAsync(new VideoGenerationRequest(prompt, [originalVideo]), options, progress, cancellationToken); + } + + /// + /// Generates or edits a video using video data provided as a byte array and the specified prompt. + /// + /// The video generator. + /// The byte array containing the video data to use as input. + /// The filename for the video data. + /// The prompt to guide the video generation. + /// The video generation options to configure the request. + /// An optional to receive progress updates during the generation process. + /// The to monitor for cancellation requests. The default is . + /// + /// , , or is . + /// + /// The videos generated by the generator. + public static Task EditVideoAsync( + this IVideoGenerator generator, + ReadOnlyMemory originalVideoData, + string fileName, + string prompt, + VideoGenerationOptions? options = null, + IProgress? progress = null, + CancellationToken cancellationToken = default) + { + _ = Throw.IfNull(generator); + _ = Throw.IfNull(fileName); + _ = Throw.IfNull(prompt); + + // Infer media type from file extension + string mediaType = GetMediaTypeFromFileName(fileName); + + var dataContent = new DataContent(originalVideoData, mediaType) { Name = fileName }; + return generator.GenerateAsync(new VideoGenerationRequest(prompt, [dataContent]), options, progress, cancellationToken); + } + + /// + /// Gets the media type based on the file extension. + /// + /// The filename to extract the media type from. + /// The inferred media type. + private static string GetMediaTypeFromFileName(string fileName) + { + return MediaTypeMap.GetMediaType(fileName) ?? 
"video/mp4"; // Default to MP4 if unknown extension + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorMetadata.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorMetadata.cs new file mode 100644 index 00000000000..8dd966fff4d --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorMetadata.cs @@ -0,0 +1,44 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// Provides metadata about an . +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public class VideoGeneratorMetadata +{ + /// Initializes a new instance of the class. + /// + /// The name of the video generation provider, if applicable. Where possible, this should map to the + /// appropriate name defined in the OpenTelemetry Semantic Conventions for Generative AI systems. + /// + /// The URL for accessing the video generation provider, if applicable. + /// The ID of the video generation model used by default, if applicable. + public VideoGeneratorMetadata(string? providerName = null, Uri? providerUri = null, string? defaultModelId = null) + { + DefaultModelId = defaultModelId; + ProviderName = providerName; + ProviderUri = providerUri; + } + + /// Gets the name of the video generation provider. + /// + /// Where possible, this maps to the appropriate name defined in the + /// OpenTelemetry Semantic Conventions for Generative AI systems. + /// + public string? ProviderName { get; } + + /// Gets the URL for accessing the video generation provider. + public Uri? ProviderUri { get; } + + /// Gets the ID of the default model used by this video generator. + /// + /// This value can be if no default model is set on the corresponding . 
+ /// An individual request may override this value via . + /// + public string? DefaultModelId { get; } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs index 9a5ebd0d06a..28ab454ef73 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs @@ -21,6 +21,7 @@ using OpenAI.Files; using OpenAI.Images; using OpenAI.Responses; +using OpenAI.Videos; #pragma warning disable MEAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. #pragma warning disable SA1515 // Single-line comment should be preceded by blank line @@ -184,6 +185,60 @@ public static ITextToSpeechClient AsITextToSpeechClient(this AudioClient audioCl public static IImageGenerator AsIImageGenerator(this ImageClient imageClient) => new OpenAIImageGenerator(imageClient); + /// Gets an for use with this . + /// The client. + /// The model ID to use for video generation (e.g. "sora-2"). + /// An that can be used to generate videos via the . + /// is . + /// + /// + /// The returned supports the following scenarios based on the + /// request contents and keys: + /// + /// + /// + /// Text-to-video: When is + /// and no routing keys are set, generates a new video from the + /// text prompt via POST /videos. + /// + /// + /// Image-to-video: When + /// contains image content (e.g., image/png), uses the image as an + /// input_reference to guide new video creation via POST /videos. + /// A sends the image URL in the JSON body; + /// a uploads the image bytes via multipart/form-data. + /// + /// + /// Edit by video ID: Set edit_video_id in + /// to the ID of a previously + /// generated video. The request is routed to POST /videos/edits. 
+ /// + /// + /// Edit by upload: When + /// contains video content (e.g., video/mp4), uploads the video for editing + /// via POST /videos/edits with multipart/form-data. + /// + /// + /// Extend: Set extend_video_id in + /// to the ID of a completed + /// video. The request is routed to POST /videos/extensions. + /// + /// + /// + /// Character IDs can be included in the create request by passing a characters key + /// in as a JSON array (e.g., + /// [{ "id": "char_abc123" }]). Characters are reusable visual assets created + /// separately via POST /videos/characters. + /// + /// + /// Any other keys in are forwarded + /// as-is to the OpenAI API request body. + /// + /// + [Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] + public static IVideoGenerator AsIVideoGenerator(this VideoClient videoClient, string? modelId = null) => + new OpenAIVideoGenerator(videoClient, modelId); + /// Gets an for use with this . /// The client. /// The number of dimensions to generate in each embedding. diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs new file mode 100644 index 00000000000..9f279f3ec27 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs @@ -0,0 +1,488 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Drawing; +using System.IO; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; +using OpenAI.Videos; + +namespace Microsoft.Extensions.AI; + +/// Represents an for an OpenAI . +/// +/// +/// This implementation uses the OpenAI video generation API. Video generation is asynchronous: +/// a generation job is created, polled for completion, and then the video content is downloaded. +/// +/// The operation chosen depends on the request contents and options: +/// +/// +/// Text-to-video: When is +/// and no routing keys are set, creates a new video from the +/// text prompt via POST /videos. +/// +/// +/// Image-to-video: When +/// contains image content (e.g., image/png), uses the image as an +/// input_reference to guide new video creation via POST /videos. +/// A sends the image URL in JSON; a +/// uploads the image bytes via multipart/form-data. +/// +/// +/// Edit by video ID: When edit_video_id is set in +/// , edits the specified +/// video via POST /videos/edits. +/// +/// +/// Edit by upload: When +/// contains video content (e.g., video/mp4), uploads the video for editing +/// via POST /videos/edits with multipart/form-data. +/// +/// +/// Extend: When extend_video_id is set in +/// , extends the completed +/// video via POST /videos/extensions. +/// +/// +/// +/// Character IDs can be included in the create request by passing a characters +/// key in as a JSON array. +/// Characters are reusable visual assets created separately via +/// POST /videos/characters. 
+/// +/// +[Experimental(DiagnosticIds.Experiments.AIOpenAIVideoClient)] +internal sealed class OpenAIVideoGenerator : IVideoGenerator +{ + /// Default polling interval for checking video generation status. + private static readonly TimeSpan _defaultPollingInterval = TimeSpan.FromSeconds(10); + + /// + /// Well-known key that routes the + /// request to POST /videos/edits. The value should be the video ID to edit. + /// + internal const string EditVideoIdKey = "edit_video_id"; + + /// + /// Well-known key that routes the + /// request to POST /videos/extensions. The value should be the completed video ID to extend. + /// + internal const string ExtendVideoIdKey = "extend_video_id"; + + /// Metadata about the client. + private readonly VideoGeneratorMetadata _metadata; + + /// The underlying . + private readonly VideoClient _videoClient; + + /// The model to use for video generation. + private readonly string? _defaultModelId; + + /// Initializes a new instance of the class for the specified . + /// The underlying client. + /// The model ID to use for video generation (e.g. "sora-2"). + /// is . + public OpenAIVideoGenerator(VideoClient videoClient, string? modelId = null) + { + _videoClient = Throw.IfNull(videoClient); + _defaultModelId = modelId; + + _metadata = new("openai", videoClient.Endpoint, modelId); + } + + /// + public async Task GenerateAsync( + VideoGenerationRequest request, + VideoGenerationOptions? options = null, + IProgress? progress = null, + CancellationToken cancellationToken = default) + { + _ = Throw.IfNull(request); + + string? prompt = request.Prompt; + _ = Throw.IfNull(prompt); + + string modelId = options?.ModelId ?? _defaultModelId ?? "sora-2"; + + // Check for routing keys in AdditionalProperties + string? editVideoId = GetStringAdditionalProperty(options, EditVideoIdKey); + string? 
extendVideoId = GetStringAdditionalProperty(options, ExtendVideoIdKey); + + // Determine OriginalMedia type (only when no routing keys override the operation) + DataContent? videoEditContent = null; + DataContent? imageReferenceData = null; + UriContent? imageReferenceUri = null; + + if (editVideoId is null && extendVideoId is null && + request.OriginalMedia is { } originalMedia) + { + AIContent? firstMedia = originalMedia.FirstOrDefault(); + if (firstMedia is DataContent dc && + IsVideoMediaType(dc.MediaType) && dc.Data.Length > 0) + { + videoEditContent = dc; + } + else if (firstMedia is UriContent uc) + { + imageReferenceUri = uc; + } + else if (firstMedia is DataContent imgDc && imgDc.Data.Length > 0) + { + imageReferenceData = imgDc; + } + } + + // Route to the appropriate endpoint and create the video generation job + RequestOptions reqOpts = new() { CancellationToken = cancellationToken }; + ClientResult createResult; + + if (extendVideoId is not null) + { + // POST /videos/extensions — extend a completed video + JsonObject body = new() + { + ["prompt"] = prompt, + ["video"] = new JsonObject { ["id"] = extendVideoId }, + }; + + if (options?.Duration is TimeSpan extDuration) + { +#pragma warning disable LA0002 + body["seconds"] = ((int)extDuration.TotalSeconds) + .ToString(System.Globalization.CultureInfo.InvariantCulture); +#pragma warning restore LA0002 + } + + ForwardAdditionalProperties(body, options); + using BinaryContent extendContent = BinaryContent.Create( + new BinaryData(body.ToJsonString())); + using PipelineMessage extendMsg = CreatePipelineRequest( + _videoClient, "/videos/extensions", extendContent, + "application/json", reqOpts); + await _videoClient.Pipeline.SendAsync(extendMsg).ConfigureAwait(false); + createResult = ClientResult.FromResponse(extendMsg.Response!); + } + else if (editVideoId is not null) + { + // POST /videos/edits — edit an existing video by ID + JsonObject body = new() + { + ["prompt"] = prompt, + ["video"] = new JsonObject 
{ ["id"] = editVideoId }, + }; + + ForwardAdditionalProperties(body, options); + using BinaryContent editContent = BinaryContent.Create( + new BinaryData(body.ToJsonString())); + using PipelineMessage editMsg = CreatePipelineRequest( + _videoClient, "/videos/edits", editContent, + "application/json", reqOpts); + await _videoClient.Pipeline.SendAsync(editMsg).ConfigureAwait(false); + createResult = ClientResult.FromResponse(editMsg.Response!); + } + else if (videoEditContent is not null) + { + // POST /videos/edits — edit an uploaded video via multipart + JsonObject formFields = new() + { + ["prompt"] = prompt, + ["model"] = modelId, + }; + + ForwardAdditionalProperties(formFields, options); + using BinaryContent editMultipart = BuildMultipartContent( + formFields, videoEditContent, "video", + out string editContentType); + using PipelineMessage editUploadMsg = CreatePipelineRequest( + _videoClient, "/videos/edits", editMultipart, + editContentType, reqOpts); + await _videoClient.Pipeline.SendAsync(editUploadMsg).ConfigureAwait(false); + createResult = ClientResult.FromResponse(editUploadMsg.Response!); + } + else + { + // POST /videos — text-to-video or image-to-video + JsonObject requestBody = new() + { + ["model"] = modelId, + ["prompt"] = prompt, + }; + + if (options?.VideoSize is Size size) + { + requestBody["size"] = $"{size.Width}x{size.Height}"; + } + + if (options?.Duration is TimeSpan duration) + { +#pragma warning disable LA0002 + requestBody["seconds"] = ((int)duration.TotalSeconds) + .ToString(System.Globalization.CultureInfo.InvariantCulture); +#pragma warning restore LA0002 + } + + if (options?.Count is int count && count > 1) + { + requestBody["n"] = count; + } + + ForwardAdditionalProperties(requestBody, options); + + if (imageReferenceUri is not null) + { + requestBody["input_reference"] = new JsonObject + { + ["image_url"] = imageReferenceUri.Uri.ToString(), + }; + } + + if (imageReferenceData is not null) + { + using BinaryContent 
multipartContent = BuildMultipartContent( + requestBody, imageReferenceData, "input_reference", + out string multipartContentType); + createResult = await _videoClient.CreateVideoAsync( + multipartContent, multipartContentType, + reqOpts).ConfigureAwait(false); + } + else + { + using BinaryContent content = BinaryContent.Create( + new BinaryData(requestBody.ToJsonString())); + createResult = await _videoClient.CreateVideoAsync( + content, "application/json", reqOpts).ConfigureAwait(false); + } + } + + // Parse the creation response to get the video ID and status + using JsonDocument createDoc = JsonDocument.Parse( + createResult.GetRawResponse().Content); + string videoId = createDoc.RootElement.GetProperty("id").GetString()!; + string status = createDoc.RootElement.GetProperty("status").GetString()!; + int? progressPercent = TryGetProgress(createDoc.RootElement); + + progress?.Report(new VideoGenerationProgress(status, progressPercent)); + + // Poll until the video generation is complete + string? 
errorMessage = null; + while (!IsTerminalStatus(status)) + { + await Task.Delay( + _defaultPollingInterval, cancellationToken).ConfigureAwait(false); + + var pollOpts = new RequestOptions { CancellationToken = cancellationToken }; + ClientResult getResult = await _videoClient.GetVideoAsync( + videoId, pollOpts).ConfigureAwait(false); + using JsonDocument statusDoc = JsonDocument.Parse( + getResult.GetRawResponse().Content); + status = statusDoc.RootElement.GetProperty("status").GetString()!; + progressPercent = TryGetProgress(statusDoc.RootElement); + + progress?.Report(new VideoGenerationProgress(status, progressPercent)); + + if (string.Equals(status, "failed", StringComparison.OrdinalIgnoreCase) && + statusDoc.RootElement.TryGetProperty("error", out JsonElement errorEl) && + errorEl.TryGetProperty("message", out JsonElement msgEl)) + { + errorMessage = msgEl.GetString(); + } + } + + if (string.Equals(status, "failed", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException( + errorMessage ?? "Video generation failed."); + } + + // Download the completed video content + var dlOpts = new RequestOptions { CancellationToken = cancellationToken }; + ClientResult downloadResult = await _videoClient.DownloadVideoAsync( + videoId, options: dlOpts).ConfigureAwait(false); + BinaryData videoData = downloadResult.GetRawResponse().Content; + + string contentType = options?.MediaType ?? "video/mp4"; + List contents = + [new DataContent(videoData.ToMemory(), contentType)]; + + return new VideoGenerationResponse(contents); + } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) => + serviceType is null ? throw new ArgumentNullException(nameof(serviceType)) : + serviceKey is not null ? null : + serviceType == typeof(VideoGeneratorMetadata) ? _metadata : + serviceType == typeof(VideoClient) ? _videoClient : + serviceType.IsInstanceOfType(this) ? this : + null; + + /// + void IDisposable.Dispose() + { + // Nothing to dispose. 
Implementation required for the IVideoGenerator interface. + } + + /// Creates a for a POST request to a path not yet exposed by the SDK. + private static PipelineMessage CreatePipelineRequest( + VideoClient videoClient, string path, BinaryContent content, + string contentType, RequestOptions options) + { + string baseUrl = videoClient.Endpoint.ToString().TrimEnd('/'); + Uri uri = new($"{baseUrl}{path}"); + PipelineMessageClassifier classifier = PipelineMessageClassifier.Create( + stackalloc ushort[] { 200 }); + PipelineMessage message = videoClient.Pipeline.CreateMessage( + uri, "POST", classifier); + message.Request.Headers.Set("Content-Type", contentType); + message.Request.Headers.Set("Accept", "application/json"); + message.Request.Content = content; + message.Apply(options); + return message; + } + + /// Returns the string value of an additional property, or if not present. + private static string? GetStringAdditionalProperty(VideoGenerationOptions? options, string key) => + options?.AdditionalProperties is { } props && + props.TryGetValue(key, out object? value) && + value is string s ? s : null; + + /// Determines whether the given media type represents a video format. + private static bool IsVideoMediaType(string? mediaType) => + mediaType is not null && + mediaType.StartsWith("video/", StringComparison.OrdinalIgnoreCase); + + /// Determines whether the given key is a routing key consumed by this generator. + private static bool IsRoutingKey(string key) => + string.Equals(key, EditVideoIdKey, StringComparison.OrdinalIgnoreCase) || + string.Equals(key, ExtendVideoIdKey, StringComparison.OrdinalIgnoreCase); + + /// Forwards additional properties to the JSON body, skipping routing keys. + private static void ForwardAdditionalProperties(JsonObject body, VideoGenerationOptions? 
options) + { + if (options?.AdditionalProperties is not { } props) + { + return; + } + + foreach (KeyValuePair prop in props) + { + if (!IsRoutingKey(prop.Key)) + { + body[prop.Key] = ToJsonNode(prop.Value); + } + } + } + + /// Determines whether the given status indicates the video generation job has finished. + private static bool IsTerminalStatus(string status) => + string.Equals(status, "completed", StringComparison.OrdinalIgnoreCase) || + string.Equals(status, "failed", StringComparison.OrdinalIgnoreCase) || + string.Equals(status, "expired", StringComparison.OrdinalIgnoreCase); + + /// Tries to read the integer progress field from a video job JSON element. + private static int? TryGetProgress(JsonElement element) => + element.TryGetProperty("progress", out JsonElement el) && + el.TryGetInt32(out int val) + ? val + : null; + + /// Builds a multipart/form-data body containing the form fields and a file part. + private static BinaryContent BuildMultipartContent( + JsonObject formFields, + DataContent fileContent, + string filePartName, + out string contentType) + { + string boundary = $"----MEAI{Guid.NewGuid():N}"; + contentType = $"multipart/form-data; boundary={boundary}"; + + using var ms = new MemoryStream(); + + foreach (KeyValuePair prop in formFields) + { + if (prop.Value is null) + { + continue; + } + + WriteFormField(ms, boundary, prop.Key, prop.Value.ToString()); + } + + string fileName = fileContent.Name ?? filePartName; + string mediaType = + fileContent.MediaType ?? "application/octet-stream"; + WriteFilePart(ms, boundary, filePartName, fileName, mediaType, fileContent.Data); + + WriteString(ms, $"--{boundary}--\r\n"); + + return BinaryContent.Create(new BinaryData(ms.ToArray())); + } + + /// Writes a simple text form field to a multipart stream. 
+ private static void WriteFormField( + MemoryStream ms, string boundary, string name, string value) + { + string header = + $"--{boundary}\r\n" + + $"Content-Disposition: form-data; name=\"{name}\"\r\n\r\n" + + $"{value}\r\n"; + WriteString(ms, header); + } + + /// Writes a file part to a multipart stream. + private static void WriteFilePart( + MemoryStream ms, + string boundary, + string name, + string fileName, + string mediaType, + ReadOnlyMemory data) + { + string header = + $"--{boundary}\r\n" + + $"Content-Disposition: form-data; name=\"{name}\"; " + + $"filename=\"{fileName}\"\r\n" + + $"Content-Type: {mediaType}\r\n\r\n"; + WriteString(ms, header); +#if NET + ms.Write(data.Span); +#else + byte[] bytes = data.ToArray(); + ms.Write(bytes, 0, bytes.Length); +#endif + WriteString(ms, "\r\n"); + } + + /// Writes a UTF-8 string to a stream. + private static void WriteString(MemoryStream ms, string value) + { + byte[] bytes = Encoding.UTF8.GetBytes(value); + ms.Write(bytes, 0, bytes.Length); + } + + /// Converts an value to a in an AOT-safe manner. + private static JsonNode? ToJsonNode(object? value) => value switch + { + null => null, + JsonNode node => node, + string s => JsonValue.Create(s), + bool b => JsonValue.Create(b), + int i => JsonValue.Create(i), + long l => JsonValue.Create(l), + float f => JsonValue.Create(f), + double d => JsonValue.Create(d), + _ => JsonValue.Create(value.ToString()!), + }; +} diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs new file mode 100644 index 00000000000..aa5ae461d1b --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs @@ -0,0 +1,324 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Metrics;
using System.Drawing;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Shared.DiagnosticIds;
using Microsoft.Shared.Diagnostics;

#pragma warning disable SA1111 // Closing parenthesis should be on line of last parameter
#pragma warning disable SA1113 // Comma should be on the same line as previous parameter

namespace Microsoft.Extensions.AI;

/// <summary>Represents a delegating video generator that implements the OpenTelemetry Semantic Conventions for Generative AI systems.</summary>
/// <remarks>
/// This class provides an implementation of the Semantic Conventions for Generative AI systems v1.40, defined at <see href="https://opentelemetry.io/docs/specs/semconv/gen-ai/" />.
/// The specification is still experimental and subject to change; as such, the telemetry output by this client is also subject to change.
/// </remarks>
[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)]
public sealed class OpenTelemetryVideoGenerator : DelegatingVideoGenerator
{
    private readonly ActivitySource _activitySource;
    private readonly Meter _meter;

    // gen_ai.client.token.usage and gen_ai.client.operation.duration instruments.
    private readonly Histogram<int> _tokenUsageHistogram;
    private readonly Histogram<double> _operationDurationHistogram;

    // Captured once from the inner generator's metadata; used on every activity/metric tag set.
    private readonly string? _defaultModelId;
    private readonly string? _providerName;
    private readonly string? _serverAddress;
    private readonly int _serverPort;

    private readonly ILogger? _logger;

    /// <summary>Initializes a new instance of the <see cref="OpenTelemetryVideoGenerator"/> class.</summary>
    /// <param name="innerGenerator">The underlying <see cref="IVideoGenerator"/>.</param>
    /// <param name="logger">The <see cref="ILogger"/> to use for emitting any logging data from the client.</param>
    /// <param name="sourceName">An optional source name that will be used on the telemetry data.</param>
    public OpenTelemetryVideoGenerator(IVideoGenerator innerGenerator, ILogger? logger = null, string? sourceName = null)
        : base(innerGenerator)
    {
        Debug.Assert(innerGenerator is not null, "Should have been validated by the base ctor");

        _logger = logger;

        // Metadata is optional; when absent, provider/server tags are simply omitted from telemetry.
        if (innerGenerator!.GetService<VideoGeneratorMetadata>() is VideoGeneratorMetadata metadata)
        {
            _defaultModelId = metadata.DefaultModelId;
            _providerName = metadata.ProviderName;
            _serverAddress = metadata.ProviderUri?.Host;
            _serverPort = metadata.ProviderUri?.Port ?? 0;
        }

        string name = string.IsNullOrEmpty(sourceName) ? OpenTelemetryConsts.DefaultSourceName : sourceName!;
        _activitySource = new(name);
        _meter = new(name);

        _tokenUsageHistogram = _meter.CreateHistogram<int>(
            OpenTelemetryConsts.GenAI.Client.TokenUsage.Name,
            OpenTelemetryConsts.TokensUnit,
            OpenTelemetryConsts.GenAI.Client.TokenUsage.Description,
            advice: new() { HistogramBucketBoundaries = OpenTelemetryConsts.GenAI.Client.TokenUsage.ExplicitBucketBoundaries }
            );

        _operationDurationHistogram = _meter.CreateHistogram<double>(
            OpenTelemetryConsts.GenAI.Client.OperationDuration.Name,
            OpenTelemetryConsts.SecondsUnit,
            OpenTelemetryConsts.GenAI.Client.OperationDuration.Description,
            advice: new() { HistogramBucketBoundaries = OpenTelemetryConsts.GenAI.Client.OperationDuration.ExplicitBucketBoundaries }
            );
    }

    /// <inheritdoc />
    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            _activitySource.Dispose();
            _meter.Dispose();
        }

        base.Dispose(disposing);
    }

    /// <summary>
    /// Gets or sets a value indicating whether potentially sensitive information should be included in telemetry.
    /// </summary>
    /// <value>
    /// <see langword="true"/> if potentially sensitive information should be included in telemetry;
    /// <see langword="false"/> if telemetry shouldn't include raw inputs and outputs.
    /// The default value is <see langword="false"/>, unless the <c>OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT</c>
    /// environment variable is set to "true" (case-insensitive).
    /// </value>
    /// <remarks>
    /// By default, telemetry includes metadata, such as token counts, but not raw inputs
    /// and outputs, such as message content, function call arguments, and function call results.
    /// The default value can be overridden by setting the <c>OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT</c>
    /// environment variable to "true". Explicitly setting this property will override the environment variable.
    /// </remarks>
    public bool EnableSensitiveData { get; set; } = TelemetryHelpers.EnableSensitiveDataDefault;

    /// <inheritdoc />
    public override object? GetService(Type serviceType, object? serviceKey = null) =>
        serviceType == typeof(ActivitySource) ? _activitySource :
        base.GetService(serviceType, serviceKey);

    /// <inheritdoc />
    public async override Task<VideoGenerationResponse> GenerateAsync(
        VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress<VideoGenerationProgress>? progress = null, CancellationToken cancellationToken = default)
    {
        _ = Throw.IfNull(request);

        using Activity? activity = CreateAndConfigureActivity(request, options);

        // Stopwatch is only allocated when the duration histogram has listeners.
        Stopwatch? stopwatch = _operationDurationHistogram.Enabled ? Stopwatch.StartNew() : null;
        string? requestModelId = options?.ModelId ?? _defaultModelId;

        VideoGenerationResponse? response = null;
        Exception? error = null;
        try
        {
            response = await base.GenerateAsync(request, options, progress, cancellationToken).ConfigureAwait(false);
            return response;
        }
        catch (Exception ex)
        {
            error = ex;
            throw;
        }
        finally
        {
            // Record duration/usage/error telemetry regardless of success or failure.
            TraceResponse(activity, requestModelId, response, error, stopwatch);
        }
    }

    /// <summary>Creates an activity for a video generation request, or returns <see langword="null"/> if not enabled.</summary>
    private Activity? CreateAndConfigureActivity(VideoGenerationRequest request, VideoGenerationOptions? options)
    {
        Activity? activity = null;
        if (_activitySource.HasListeners())
        {
            string? modelId = options?.ModelId ?? _defaultModelId;

            activity = _activitySource.StartActivity(
                string.IsNullOrWhiteSpace(modelId) ? OpenTelemetryConsts.GenAI.GenerateContentName : $"{OpenTelemetryConsts.GenAI.GenerateContentName} {modelId}",
                ActivityKind.Client);

            if (activity is { IsAllDataRequested: true })
            {
                _ = activity
                    .AddTag(OpenTelemetryConsts.GenAI.Operation.Name, OpenTelemetryConsts.GenAI.GenerateContentName)
                    .AddTag(OpenTelemetryConsts.GenAI.Output.Type, OpenTelemetryConsts.TypeVideo)
                    .AddTag(OpenTelemetryConsts.GenAI.Request.Model, modelId)
                    .AddTag(OpenTelemetryConsts.GenAI.Provider.Name, _providerName);

                if (_serverAddress is not null)
                {
                    _ = activity
                        .AddTag(OpenTelemetryConsts.Server.Address, _serverAddress)
                        .AddTag(OpenTelemetryConsts.Server.Port, _serverPort);
                }

                if (options is not null)
                {
                    if (options.Count is int count)
                    {
                        _ = activity.AddTag(OpenTelemetryConsts.GenAI.Request.ChoiceCount, count);
                    }

                    // NOTE(review): gen_ai.request.video.* tags are not (yet) part of the GenAI
                    // semantic conventions — confirm the chosen names before stabilizing.
                    if (options.VideoSize is Size size)
                    {
                        _ = activity
                            .AddTag("gen_ai.request.video.width", size.Width)
                            .AddTag("gen_ai.request.video.height", size.Height);
                    }

                    if (options.Duration is TimeSpan duration)
                    {
                        _ = activity.AddTag("gen_ai.request.video.duration", duration.TotalSeconds);
                    }

                    if (options.FramesPerSecond is int fps)
                    {
                        _ = activity.AddTag("gen_ai.request.video.fps", fps);
                    }
                }

                if (EnableSensitiveData)
                {
                    // Serialize the prompt plus any reference media as a single user message,
                    // mirroring how chat-based telemetry records inputs.
                    List<AIContent> content = [];

                    if (request.Prompt is not null)
                    {
                        content.Add(new TextContent(request.Prompt));
                    }

                    if (request.OriginalMedia is not null)
                    {
                        content.AddRange(request.OriginalMedia);
                    }

                    _ = activity.AddTag(
                        OpenTelemetryConsts.GenAI.Input.Messages,
                        OpenTelemetryChatClient.SerializeChatMessages([new(ChatRole.User, content)]));

                    if (options?.AdditionalProperties is { } props)
                    {
                        foreach (KeyValuePair<string, object?> prop in props)
                        {
                            _ = activity.AddTag(prop.Key, prop.Value);
                        }
                    }
                }
            }
        }

        return activity;
    }

    /// <summary>Adds video generation response information to the activity.</summary>
    private void TraceResponse(
        Activity? activity,
        string? requestModelId,
        VideoGenerationResponse? response,
        Exception? error,
        Stopwatch? stopwatch)
    {
        if (_operationDurationHistogram.Enabled && stopwatch is not null)
        {
            TagList tags = default;

            AddMetricTags(ref tags, requestModelId);
            if (error is not null)
            {
                tags.Add(OpenTelemetryConsts.Error.Type, error.GetType().FullName);
            }

            _operationDurationHistogram.Record(stopwatch.Elapsed.TotalSeconds, tags);
        }

        if (error is not null)
        {
            _ = activity?
                .AddTag(OpenTelemetryConsts.Error.Type, error.GetType().FullName)
                .SetStatus(ActivityStatusCode.Error, error.Message);

            if (_logger is not null)
            {
                OpenTelemetryLog.OperationException(_logger, error);
            }
        }

        if (response is not null)
        {
            if (EnableSensitiveData &&
                response.Contents is { Count: > 0 } contents &&
                activity is { IsAllDataRequested: true })
            {
                _ = activity.AddTag(
                    OpenTelemetryConsts.GenAI.Output.Messages,
                    OpenTelemetryChatClient.SerializeChatMessages([new(ChatRole.Assistant, contents)]));
            }

            if (response.Usage is { } usage)
            {
                if (_tokenUsageHistogram.Enabled)
                {
                    // NOTE(review): counts are narrowed from long to int to match the histogram
                    // instrument; token counts over int.MaxValue would overflow — confirm acceptable.
                    if (usage.InputTokenCount is long inputTokens)
                    {
                        TagList tags = default;
                        tags.Add(OpenTelemetryConsts.GenAI.Token.Type, OpenTelemetryConsts.TokenTypeInput);
                        AddMetricTags(ref tags, requestModelId);
                        _tokenUsageHistogram.Record((int)inputTokens, tags);
                    }

                    if (usage.OutputTokenCount is long outputTokens)
                    {
                        TagList tags = default;
                        tags.Add(OpenTelemetryConsts.GenAI.Token.Type, OpenTelemetryConsts.TokenTypeOutput);
                        AddMetricTags(ref tags, requestModelId);
                        _tokenUsageHistogram.Record((int)outputTokens, tags);
                    }
                }

                if (activity is { IsAllDataRequested: true })
                {
                    if (usage.InputTokenCount is long inputTokens)
                    {
                        _ = activity.AddTag(OpenTelemetryConsts.GenAI.Usage.InputTokens, (int)inputTokens);
                    }

                    if (usage.OutputTokenCount is long outputTokens)
                    {
                        _ = activity.AddTag(OpenTelemetryConsts.GenAI.Usage.OutputTokens, (int)outputTokens);
                    }
                }
            }
        }

+ void AddMetricTags(ref TagList tags, string? requestModelId) + { + tags.Add(OpenTelemetryConsts.GenAI.Operation.Name, OpenTelemetryConsts.GenAI.GenerateContentName); + + if (requestModelId is not null) + { + tags.Add(OpenTelemetryConsts.GenAI.Request.Model, requestModelId); + } + + tags.Add(OpenTelemetryConsts.GenAI.Provider.Name, _providerName); + + if (_serverAddress is string endpointAddress) + { + tags.Add(OpenTelemetryConsts.Server.Address, endpointAddress); + tags.Add(OpenTelemetryConsts.Server.Port, _serverPort); + } + } + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGeneratorBuilderExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGeneratorBuilderExtensions.cs new file mode 100644 index 00000000000..402a89e7841 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGeneratorBuilderExtensions.cs @@ -0,0 +1,43 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; + +namespace Microsoft.Extensions.AI; + +/// Provides extensions for configuring instances. +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public static class OpenTelemetryVideoGeneratorBuilderExtensions +{ + /// + /// Adds OpenTelemetry support to the video generator pipeline, following the OpenTelemetry Semantic Conventions for Generative AI systems. + /// + /// + /// The draft specification this follows is available at . + /// The specification is still experimental and subject to change; as such, the telemetry output by this client is also subject to change. + /// + /// The . 
+ /// An optional to use to create a logger for logging events. + /// An optional source name that will be used on the telemetry data. + /// An optional callback that can be used to configure the instance. + /// The . + public static VideoGeneratorBuilder UseOpenTelemetry( + this VideoGeneratorBuilder builder, + ILoggerFactory? loggerFactory = null, + string? sourceName = null, + Action? configure = null) => + Throw.IfNull(builder).Use((innerGenerator, services) => + { + loggerFactory ??= services.GetService(); + + var g = new OpenTelemetryVideoGenerator(innerGenerator, loggerFactory?.CreateLogger(typeof(OpenTelemetryVideoGenerator)), sourceName); + configure?.Invoke(g); + + return g; + }); +} diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClient.cs new file mode 100644 index 00000000000..631307582a2 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClient.cs @@ -0,0 +1,506 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; + +namespace Microsoft.Extensions.AI; + +/// A delegating chat client that enables video generation capabilities by converting instances to function tools. +/// +/// +/// The provided implementation of is thread-safe for concurrent use so long as the +/// employed is also thread-safe for concurrent use. 
/// </para>
/// <para>
/// This client automatically detects <see cref="HostedVideoGenerationTool"/> instances in the <see cref="ChatOptions.Tools"/> collection
/// and replaces them with equivalent function tools that the chat client can invoke to perform video generation and editing operations.
/// </para>
/// </remarks>
[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)]
public sealed class VideoGeneratingChatClient : DelegatingChatClient
{
    /// <summary>
    /// Specifies how video and other data content is handled when passing data to an inner client.
    /// </summary>
    /// <remarks>
    /// Use this enumeration to control whether videos in the data content are passed as-is, replaced
    /// with unique identifiers, or only generated videos are replaced. This setting affects how downstream clients
    /// receive and process video data.
    /// Reducing what's passed downstream can help manage the context window.
    /// </remarks>
    public enum DataContentHandling
    {
        /// <summary>Pass all <see cref="DataContent"/> to inner client.</summary>
        None,

        /// <summary>Replace all videos with unique identifiers when passing to inner client.</summary>
        AllVideos,

        /// <summary>Replace only videos that were produced by past video generation requests with unique identifiers when passing to inner client.</summary>
        GeneratedVideos
    }

    // Prefix used in placeholder text and in DataContent.AdditionalProperties to track stored videos.
    private const string VideoKey = "meai_video";

    private readonly IVideoGenerator _videoGenerator;
    private readonly DataContentHandling _dataContentHandling;

    /// <summary>Initializes a new instance of the <see cref="VideoGeneratingChatClient"/> class.</summary>
    /// <param name="innerClient">The underlying <see cref="IChatClient"/>.</param>
    /// <param name="videoGenerator">An <see cref="IVideoGenerator"/> instance that will be used for video generation operations.</param>
    /// <param name="dataContentHandling">Specifies how to handle <see cref="DataContent"/> instances when passing messages to the inner client.
    /// The default is <see cref="DataContentHandling.AllVideos"/>.</param>
    /// <exception cref="ArgumentNullException"><paramref name="innerClient"/> or <paramref name="videoGenerator"/> is <see langword="null"/>.</exception>
    public VideoGeneratingChatClient(IChatClient innerClient, IVideoGenerator videoGenerator, DataContentHandling dataContentHandling = DataContentHandling.AllVideos)
        : base(innerClient)
    {
        _videoGenerator = Throw.IfNull(videoGenerator);
        _dataContentHandling = dataContentHandling;
    }

    /// <inheritdoc />
    public override async Task<ChatResponse> GetResponseAsync(
        IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
    {
        _ = Throw.IfNull(messages);

        // Per-request state keeps this client thread-safe for concurrent calls.
        var requestState = new RequestState(_videoGenerator, _dataContentHandling);

        // Process the chat options to replace HostedVideoGenerationTool with functions
        var processedOptions = requestState.ProcessChatOptions(options);
        var processedMessages = requestState.ProcessChatMessages(messages);

        // Get response from base implementation.
        // ConfigureAwait(false): library code must not capture the caller's context.
        var response = await base.GetResponseAsync(processedMessages, processedOptions, cancellationToken).ConfigureAwait(false);

        // Replace FunctionResultContent instances with generated video content
        foreach (var message in response.Messages)
        {
            message.Contents = requestState.ReplaceVideoGenerationFunctionResults(message.Contents);
        }

        return response;
    }

    /// <inheritdoc />
    public override async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
        IEnumerable<ChatMessage> messages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        _ = Throw.IfNull(messages);

        var requestState = new RequestState(_videoGenerator, _dataContentHandling);

        // Process the chat options to replace HostedVideoGenerationTool with functions
        var processedOptions = requestState.ProcessChatOptions(options);
        var processedMessages = requestState.ProcessChatMessages(messages);

        // ConfigureAwait(false): library code must not capture the caller's context.
        await foreach (var update in base.GetStreamingResponseAsync(processedMessages, processedOptions, cancellationToken).ConfigureAwait(false))
        {
            // Replace any FunctionResultContent instances with generated video content
            var newContents = requestState.ReplaceVideoGenerationFunctionResults(update.Contents);

            if (!ReferenceEquals(newContents, update.Contents))
            {
                // Create a new update instance with modified contents
                var modifiedUpdate = update.Clone();
                modifiedUpdate.Contents = newContents;
                yield return modifiedUpdate;
            }
            else
            {
                yield return update;
            }
        }
    }

    /// <summary>Provides a mechanism for releasing unmanaged resources.</summary>
+ /// to dispose managed resources; otherwise, . + protected override void Dispose(bool disposing) + { + if (disposing) + { + _videoGenerator.Dispose(); + } + + base.Dispose(disposing); + } + + /// + /// Contains all the per-request state and methods for handling video generation requests. + /// This class is created fresh for each request to ensure thread safety. + /// This class is not exposed publicly and does not own any of it's resources. + /// + private sealed class RequestState + { + private readonly IVideoGenerator _videoGenerator; + private readonly DataContentHandling _dataContentHandling; + private readonly HashSet _toolNames = new(StringComparer.Ordinal); + private readonly Dictionary> _videoContentByCallId = []; + private readonly Dictionary _videoContentById = new(StringComparer.OrdinalIgnoreCase); + private VideoGenerationOptions? _videoGenerationOptions; + + public RequestState(IVideoGenerator videoGenerator, DataContentHandling dataContentHandling) + { + _videoGenerator = videoGenerator; + _dataContentHandling = dataContentHandling; + } + + /// + /// Processes the chat messages to replace videos in data content with unique identifiers as needed. + /// All videos will be stored for later retrieval during video editing operations. + /// See for details on video replacement behavior. + /// + /// Messages to process. + /// Processed messages, or the original messages if no changes were made. + public IEnumerable ProcessChatMessages(IEnumerable messages) + { + List? newMessages = null; + int messageIndex = 0; + foreach (var message in messages) + { + List? 
newContents = null; + for (int contentIndex = 0; contentIndex < message.Contents.Count; contentIndex++) + { + var content = message.Contents[contentIndex]; + + void ReplaceVideo(string videoId, DataContent dataContent) + { + // Replace video with a placeholder text content, to give an indication to the model of its placement in the context + newContents ??= CopyList(message.Contents, contentIndex); + newContents.Add(new TextContent($"[{VideoKey}:{videoId}] available for edit.") + { + Annotations = dataContent.Annotations, + AdditionalProperties = dataContent.AdditionalProperties + }); + } + + if (content is DataContent dataContent && dataContent.HasTopLevelMediaType("video")) + { + // Store the video to make available for edit + var videoId = StoreVideo(dataContent); + + if (_dataContentHandling == DataContentHandling.AllVideos) + { + ReplaceVideo(videoId, dataContent); + continue; // Skip adding the original content + } + } + else if (content is VideoGenerationToolResultContent toolResultContent) + { + foreach (var output in toolResultContent.Outputs ?? []) + { + if (output is DataContent generatedDataContent && generatedDataContent.HasTopLevelMediaType("video")) + { + // Store the video to make available for edit + var videoId = StoreVideo(generatedDataContent, isGenerated: true); + + if (_dataContentHandling == DataContentHandling.AllVideos || + _dataContentHandling == DataContentHandling.GeneratedVideos) + { + ReplaceVideo(videoId, generatedDataContent); + } + } + } + + if (_dataContentHandling == DataContentHandling.AllVideos || + _dataContentHandling == DataContentHandling.GeneratedVideos) + { + // skip adding the generated content + continue; + } + } + + // Add the original content if no replacement was made + newContents?.Add(content); + } + + if (newContents != null) + { + newMessages ??= [.. 
messages.Take(messageIndex)]; + var newMessage = message.Clone(); + newMessage.Contents = newContents; + newMessages.Add(newMessage); + } + else + { + newMessages?.Add(message); + } + + messageIndex++; + } + + return newMessages ?? messages; + } + + public ChatOptions? ProcessChatOptions(ChatOptions? options) + { + if (options?.Tools is null || options.Tools.Count == 0) + { + return options; + } + + List? newTools = null; + var tools = options.Tools; + for (int i = 0; i < tools.Count; i++) + { + var tool = tools[i]; + + // remove all instances of HostedVideoGenerationTool and store the options from the last one + if (tool is HostedVideoGenerationTool videoGenerationTool) + { + _videoGenerationOptions = videoGenerationTool.Options; + + // for the first video generation tool, clone the options and insert our function tools + // remove any subsequent video generation tools + newTools ??= InitializeTools(tools, i); + } + else + { + newTools?.Add(tool); + } + } + + if (newTools is not null) + { + var newOptions = options.Clone(); + newOptions.Tools = newTools; + return newOptions; + } + + return options; + + List InitializeTools(IList existingTools, int toOffsetExclusive) + { +#if NET + ReadOnlySpan tools = +#else + AITool[] tools = +#endif + [ + AIFunctionFactory.Create(GenerateVideoAsync), + AIFunctionFactory.Create(EditVideoAsync), + AIFunctionFactory.Create(GetVideosForEdit) + ]; + + foreach (var tool in tools) + { + _toolNames.Add(tool.Name); + } + + var result = CopyList(existingTools, toOffsetExclusive, tools.Length); + result.AddRange(tools); + return result; + } + } + + /// + /// Replaces FunctionResultContent instances for video generation functions with actual generated video content. + /// + /// The list of AI content to process. + public IList ReplaceVideoGenerationFunctionResults(IList contents) + { + List? 
newContents = null; + + // Replace FunctionResultContent instances with generated video content + for (int i = contents.Count - 1; i >= 0; i--) + { + var content = contents[i]; + + // We must lookup by name because in the streaming case we have not yet been called to record the CallId. + if (content is FunctionCallContent functionCall && + _toolNames.Contains(functionCall.Name)) + { + // create a new list and omit the FunctionCallContent + newContents ??= CopyList(contents, i); + + if (functionCall.Name != nameof(GetVideosForEdit)) + { + newContents.Add(new VideoGenerationToolCallContent(functionCall.CallId)); + } + } + else if (content is FunctionResultContent functionResult && + _videoContentByCallId.TryGetValue(functionResult.CallId, out var videoContents)) + { + newContents ??= CopyList(contents, i); + + if (videoContents.Any()) + { + // Insert VideoGenerationToolResultContent in its place, do not preserve the FunctionResultContent + newContents.Add(new VideoGenerationToolResultContent(functionResult.CallId) + { + Outputs = videoContents + }); + } + + // Remove the mapping as it's no longer needed + _ = _videoContentByCallId.Remove(functionResult.CallId); + } + else + { + // keep the existing content if we have a new list + newContents?.Add(content); + } + } + + return newContents ?? contents; + } + + [Description("Generates videos based on a text description.")] + public async Task GenerateVideoAsync( + [Description("A detailed description of the video to generate")] string prompt, + CancellationToken cancellationToken = default) + { + // Get the call ID from the current function invocation context + var callId = FunctionInvokingChatClient.CurrentContext?.CallContent.CallId; + if (callId == null) + { + return "No call ID available for video generation."; + } + + var request = new VideoGenerationRequest(prompt); + var options = _videoGenerationOptions ?? 
new VideoGenerationOptions(); + options.Count ??= 1; + + var response = await _videoGenerator.GenerateAsync(request, options, cancellationToken: cancellationToken); + + if (response.Contents.Count == 0) + { + return "No video was generated."; + } + + List videoIds = []; + List videoContents = _videoContentByCallId[callId] = []; + foreach (var content in response.Contents) + { + if (content is DataContent videoContent && videoContent.MediaType.StartsWith("video/", StringComparison.OrdinalIgnoreCase)) + { + videoContents.Add(videoContent); + videoIds.Add(StoreVideo(videoContent, true)); + } + } + + return "Generated video successfully."; + } + + [Description("Lists the identifiers of all videos available for edit.")] + public IEnumerable GetVideosForEdit() + { + // Get the call ID from the current function invocation context + var callId = FunctionInvokingChatClient.CurrentContext?.CallContent.CallId; + if (callId == null) + { + return ["No call ID available for video editing."]; + } + + _videoContentByCallId[callId] = []; + + return _videoContentById.Keys.AsEnumerable(); + } + + [Description("Edits an existing video based on a text description.")] + public async Task EditVideoAsync( + [Description("A detailed description of the video to generate")] string prompt, + [Description($"The video to edit from one of the available video identifiers returned by {nameof(GetVideosForEdit)}")] string videoId, + CancellationToken cancellationToken = default) + { + // Get the call ID from the current function invocation context + var callId = FunctionInvokingChatClient.CurrentContext?.CallContent.CallId; + if (callId == null) + { + return "No call ID available for video editing."; + } + + if (string.IsNullOrEmpty(videoId)) + { + return "No videoId provided"; + } + + try + { + var originalVideo = RetrieveVideoContent(videoId); + if (originalVideo == null) + { + return $"No video found with: {videoId}"; + } + + var request = new VideoGenerationRequest(prompt, [originalVideo]); + 
var response = await _videoGenerator.GenerateAsync(request, _videoGenerationOptions, cancellationToken: cancellationToken); + + if (response.Contents.Count == 0) + { + return "No edited video was generated."; + } + + List videoIds = []; + List videoContents = _videoContentByCallId[callId] = []; + foreach (var content in response.Contents) + { + if (content is DataContent videoContent && videoContent.MediaType.StartsWith("video/", StringComparison.OrdinalIgnoreCase)) + { + videoContents.Add(videoContent); + videoIds.Add(StoreVideo(videoContent, true)); + } + } + + return "Edited video successfully."; + } + catch (FormatException) + { + return "Invalid video data format. Please provide a valid base64-encoded video."; + } + } + + private static List CopyList(IList original, int toOffsetExclusive, int additionalCapacity = 0) + { + var newList = new List(original.Count + additionalCapacity); + + // Copy all items up to and excluding the current index + for (int j = 0; j < toOffsetExclusive; j++) + { + newList.Add(original[j]); + } + + return newList; + } + + private DataContent? RetrieveVideoContent(string videoId) + { + if (_videoContentById.TryGetValue(videoId, out var videoContent)) + { + return videoContent as DataContent; + } + + return null; + } + + private string StoreVideo(DataContent videoContent, bool isGenerated = false) + { + // Generate a unique ID for the video if it doesn't have one + string? videoId = null; + if (videoContent.AdditionalProperties?.TryGetValue(VideoKey, out videoId) is false || videoId is null) + { + videoId = videoContent.Name ?? 
Guid.NewGuid().ToString(); + } + + if (isGenerated) + { + videoContent.AdditionalProperties ??= []; + videoContent.AdditionalProperties[VideoKey] = videoId; + } + + // Store the video content for later retrieval + _videoContentById[videoId] = videoContent; + + return videoId; + } + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClientBuilderExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClientBuilderExtensions.cs new file mode 100644 index 00000000000..1fe30653e58 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClientBuilderExtensions.cs @@ -0,0 +1,47 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; + +namespace Microsoft.Extensions.AI; + +/// Provides extensions for configuring instances. +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public static class VideoGeneratingChatClientBuilderExtensions +{ + /// Adds video generation capabilities to the chat client pipeline. + /// The . + /// + /// An optional used for video generation operations. + /// If not supplied, a required instance will be resolved from the service provider. + /// + /// An optional callback that can be used to configure the instance. + /// The . + /// is . + /// + /// + /// This method enables the chat client to handle instances by converting them + /// into function tools that can be invoked by the underlying chat model to perform video generation and editing operations. + /// + /// + public static ChatClientBuilder UseVideoGeneration( + this ChatClientBuilder builder, + IVideoGenerator? videoGenerator = null, + Action? 
configure = null) + { + _ = Throw.IfNull(builder); + + return builder.Use((innerClient, services) => + { + videoGenerator ??= services.GetRequiredService(); + + var chatClient = new VideoGeneratingChatClient(innerClient, videoGenerator); + configure?.Invoke(chatClient); + return chatClient; + }); + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI/OpenTelemetryConsts.cs b/src/Libraries/Microsoft.Extensions.AI/OpenTelemetryConsts.cs index 8ffbd0b9dec..ba7f49a110a 100644 --- a/src/Libraries/Microsoft.Extensions.AI/OpenTelemetryConsts.cs +++ b/src/Libraries/Microsoft.Extensions.AI/OpenTelemetryConsts.cs @@ -21,6 +21,7 @@ internal static class OpenTelemetryConsts public const string TypeText = "text"; public const string TypeJson = "json"; public const string TypeImage = "image"; + public const string TypeVideo = "video"; public const string TypeAudio = "audio"; public const string TokenTypeInput = "input"; diff --git a/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGenerator.cs new file mode 100644 index 00000000000..9ce0be59397 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGenerator.cs @@ -0,0 +1,54 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; + +namespace Microsoft.Extensions.AI; + +/// Represents a delegating video generator that configures a instance used by the remainder of the pipeline. +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public sealed class ConfigureOptionsVideoGenerator : DelegatingVideoGenerator +{ + /// The callback delegate used to configure options. 
+ private readonly Action _configureOptions; + + /// Initializes a new instance of the class with the specified callback. + /// The inner generator. + /// + /// The delegate to invoke to configure the instance. It is passed a clone of the caller-supplied instance + /// (or a newly constructed instance if the caller-supplied instance is ). + /// + /// or is . + /// + /// The delegate is passed either a new instance of if + /// the caller didn't supply a instance, or a clone (via of the caller-supplied + /// instance if one was supplied. + /// + public ConfigureOptionsVideoGenerator(IVideoGenerator innerGenerator, Action configure) + : base(innerGenerator) + { + _configureOptions = Throw.IfNull(configure); + } + + /// + public override async Task GenerateAsync( + VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress? progress = null, CancellationToken cancellationToken = default) + { + return await base.GenerateAsync(request, Configure(options), progress, cancellationToken); + } + + /// Creates and configures the to pass along to the inner generator. + private VideoGenerationOptions Configure(VideoGenerationOptions? options) + { + options = options?.Clone() ?? new(); + + _configureOptions(options); + + return options; + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGeneratorBuilderExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGeneratorBuilderExtensions.cs new file mode 100644 index 00000000000..25b9b958abf --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGeneratorBuilderExtensions.cs @@ -0,0 +1,38 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
using System;
using System.Diagnostics.CodeAnalysis;
using Microsoft.Shared.DiagnosticIds;
using Microsoft.Shared.Diagnostics;

namespace Microsoft.Extensions.AI;

/// <summary>Provides extensions for configuring <see cref="ConfigureOptionsVideoGenerator"/> instances.</summary>
[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)]
public static class ConfigureOptionsVideoGeneratorBuilderExtensions
{
    /// <summary>
    /// Adds a callback that configures a <see cref="VideoGenerationOptions"/> to be passed to the next generator in the pipeline.
    /// </summary>
    /// <param name="builder">The <see cref="VideoGeneratorBuilder"/>.</param>
    /// <param name="configure">
    /// The delegate to invoke to configure the <see cref="VideoGenerationOptions"/> instance.
    /// It is passed a clone of the caller-supplied instance (or a newly constructed instance if the caller-supplied instance is <see langword="null"/>).
    /// </param>
    /// <exception cref="ArgumentNullException"><paramref name="builder"/> or <paramref name="configure"/> is <see langword="null"/>.</exception>
    /// <remarks>
    /// This method can be used to set default options. The <paramref name="configure"/> delegate is passed either a new instance of
    /// <see cref="VideoGenerationOptions"/> if the caller didn't supply a <see cref="VideoGenerationOptions"/> instance, or a clone (via <see cref="VideoGenerationOptions.Clone"/>)
    /// of the caller-supplied instance if one was supplied.
    /// </remarks>
    /// <returns>The <paramref name="builder"/>.</returns>
    public static VideoGeneratorBuilder ConfigureOptions(
        this VideoGeneratorBuilder builder, Action<VideoGenerationOptions> configure)
    {
        _ = Throw.IfNull(builder);
        _ = Throw.IfNull(configure);

        return builder.Use(inner => new ConfigureOptionsVideoGenerator(inner, configure));
    }
}
using System;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Shared.DiagnosticIds;
using Microsoft.Shared.Diagnostics;

namespace Microsoft.Extensions.AI;

/// <summary>A delegating video generator that logs video generation operations to an <see cref="ILogger"/>.</summary>
/// <remarks>
/// <para>
/// The provided implementation of <see cref="IVideoGenerator"/> is thread-safe for concurrent use so long as the
/// <see cref="ILogger"/> employed is also thread-safe for concurrent use.
/// </para>
/// <para>
/// When the employed <see cref="ILogger"/> enables <see cref="LogLevel.Trace"/>, the contents of
/// prompts and options are logged. These prompts and options may contain sensitive application data.
/// <see cref="LogLevel.Trace"/> is disabled by default and should never be enabled in a production environment.
/// Prompts and options are not logged at other logging levels.
/// </para>
/// </remarks>
[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)]
public partial class LoggingVideoGenerator : DelegatingVideoGenerator
{
    /// <summary>An <see cref="ILogger"/> instance used for all logging.</summary>
    private readonly ILogger _logger;

    /// <summary>The <see cref="JsonSerializerOptions"/> to use for serialization of state written to the logger.</summary>
    private JsonSerializerOptions _jsonSerializerOptions;

    /// <summary>Initializes a new instance of the <see cref="LoggingVideoGenerator"/> class.</summary>
    /// <param name="innerGenerator">The underlying <see cref="IVideoGenerator"/>.</param>
    /// <param name="logger">An <see cref="ILogger"/> instance that will be used for all logging.</param>
    /// <exception cref="ArgumentNullException"><paramref name="innerGenerator"/> or <paramref name="logger"/> is <see langword="null"/>.</exception>
    public LoggingVideoGenerator(IVideoGenerator innerGenerator, ILogger logger)
        : base(innerGenerator)
    {
        _logger = Throw.IfNull(logger);
        _jsonSerializerOptions = AIJsonUtilities.DefaultOptions;
    }

    /// <summary>Gets or sets JSON serialization options to use when serializing logging data.</summary>
    /// <exception cref="ArgumentNullException">The value being set is <see langword="null"/>.</exception>
    public JsonSerializerOptions JsonSerializerOptions
    {
        get => _jsonSerializerOptions;
        set => _jsonSerializerOptions = Throw.IfNull(value);
    }

    /// <inheritdoc/>
    public override async Task<VideoGenerationResponse> GenerateAsync(
        VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress<VideoGenerationProgress>? progress = null, CancellationToken cancellationToken = default)
    {
        _ = Throw.IfNull(request);

        if (_logger.IsEnabled(LogLevel.Debug))
        {
            if (_logger.IsEnabled(LogLevel.Trace))
            {
                LogInvokedSensitive(nameof(GenerateAsync), request.Prompt ?? string.Empty, AsJson(options), AsJson(this.GetService<VideoGeneratorMetadata>()));
            }
            else
            {
                LogInvoked(nameof(GenerateAsync));
            }
        }

        try
        {
            // ConfigureAwait(false): library code must not capture the caller's synchronization context.
            var response = await base.GenerateAsync(request, options, progress, cancellationToken).ConfigureAwait(false);

            if (_logger.IsEnabled(LogLevel.Debug))
            {
                // Avoid logging raw video bytes: only serialize the full response when it contains no binary DataContent.
                if (_logger.IsEnabled(LogLevel.Trace) && response.Contents.All(c => c is not DataContent))
                {
                    LogCompletedSensitive(nameof(GenerateAsync), AsJson(response));
                }
                else
                {
                    LogCompleted(nameof(GenerateAsync));
                }
            }

            return response;
        }
        catch (OperationCanceledException)
        {
            LogInvocationCanceled(nameof(GenerateAsync));
            throw;
        }
        catch (Exception ex)
        {
            LogInvocationFailed(nameof(GenerateAsync), ex);
            throw;
        }
    }

    private string AsJson<T>(T value) => TelemetryHelpers.AsJson(value, _jsonSerializerOptions);

    [LoggerMessage(LogLevel.Debug, "{MethodName} invoked.")]
    private partial void LogInvoked(string methodName);

    [LoggerMessage(LogLevel.Trace, "{MethodName} invoked: Prompt: {Prompt}. Options: {VideoGenerationOptions}. Metadata: {VideoGeneratorMetadata}.")]
    private partial void LogInvokedSensitive(string methodName, string prompt, string videoGenerationOptions, string videoGeneratorMetadata);

    [LoggerMessage(LogLevel.Debug, "{MethodName} completed.")]
    private partial void LogCompleted(string methodName);

    [LoggerMessage(LogLevel.Trace, "{MethodName} completed: {VideoGenerationResponse}.")]
    private partial void LogCompletedSensitive(string methodName, string videoGenerationResponse);

    [LoggerMessage(LogLevel.Debug, "{MethodName} canceled.")]
    private partial void LogInvocationCanceled(string methodName);

    [LoggerMessage(LogLevel.Error, "{MethodName} failed.")]
    private partial void LogInvocationFailed(string methodName, Exception error);
}
+ /// If not supplied, a required instance will be resolved from the service provider. + /// + /// An optional callback that can be used to configure the instance. + /// The . + /// is . + /// + /// + /// When the employed enables , the contents of + /// prompts and options are logged. These prompts and options may contain sensitive application data. + /// is disabled by default and should never be enabled in a production environment. + /// Prompts and options are not logged at other logging levels. + /// + /// + public static VideoGeneratorBuilder UseLogging( + this VideoGeneratorBuilder builder, + ILoggerFactory? loggerFactory = null, + Action? configure = null) + { + _ = Throw.IfNull(builder); + + return builder.Use((innerGenerator, services) => + { + loggerFactory ??= services.GetRequiredService(); + + // If the factory we resolve is for the null logger, the LoggingVideoGenerator will end up + // being an expensive nop, so skip adding it and just return the inner generator. + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerGenerator; + } + + var videoGenerator = new LoggingVideoGenerator(innerGenerator, loggerFactory.CreateLogger(typeof(LoggingVideoGenerator))); + configure?.Invoke(videoGenerator); + return videoGenerator; + }); + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilder.cs b/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilder.cs new file mode 100644 index 00000000000..016dc4debc2 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilder.cs @@ -0,0 +1,86 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; + +namespace Microsoft.Extensions.AI; + +/// A builder for creating pipelines of . 
[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)]
public sealed class VideoGeneratorBuilder
{
    /// <summary>Produces the innermost <see cref="IVideoGenerator"/> for the pipeline.</summary>
    private readonly Func<IServiceProvider, IVideoGenerator> _innerGeneratorFactory;

    /// <summary>The registered generator factory instances.</summary>
    private List<Func<IVideoGenerator, IServiceProvider, IVideoGenerator>>? _generatorFactories;

    /// <summary>Initializes a new instance of the <see cref="VideoGeneratorBuilder"/> class.</summary>
    /// <param name="innerGenerator">The inner <see cref="IVideoGenerator"/> that represents the underlying backend.</param>
    /// <exception cref="ArgumentNullException"><paramref name="innerGenerator"/> is <see langword="null"/>.</exception>
    public VideoGeneratorBuilder(IVideoGenerator innerGenerator)
    {
        _ = Throw.IfNull(innerGenerator);
        _innerGeneratorFactory = _ => innerGenerator;
    }

    /// <summary>Initializes a new instance of the <see cref="VideoGeneratorBuilder"/> class.</summary>
    /// <param name="innerGeneratorFactory">A callback that produces the inner <see cref="IVideoGenerator"/> that represents the underlying backend.</param>
    /// <exception cref="ArgumentNullException"><paramref name="innerGeneratorFactory"/> is <see langword="null"/>.</exception>
    public VideoGeneratorBuilder(Func<IServiceProvider, IVideoGenerator> innerGeneratorFactory)
    {
        _innerGeneratorFactory = Throw.IfNull(innerGeneratorFactory);
    }

    /// <summary>Builds an <see cref="IVideoGenerator"/> that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn.</summary>
    /// <param name="services">
    /// The <see cref="IServiceProvider"/> that should provide services to the <see cref="IVideoGenerator"/> instances.
    /// If null, an empty <see cref="IServiceProvider"/> will be used.
    /// </param>
    /// <returns>An instance of <see cref="IVideoGenerator"/> that represents the entire pipeline.</returns>
    public IVideoGenerator Build(IServiceProvider? services = null)
    {
        services ??= EmptyServiceProvider.Instance;
        IVideoGenerator generator = _innerGeneratorFactory(services);

        // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost.
        if (_generatorFactories is { } factories)
        {
            for (int index = factories.Count - 1; index >= 0; index--)
            {
                generator = factories[index](generator, services) ??
                    throw new InvalidOperationException(
                        $"The {nameof(VideoGeneratorBuilder)} entry at index {index} returned null. " +
                        $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVideoGenerator)} instances.");
            }
        }

        return generator;
    }

    /// <summary>Adds a factory for an intermediate video generator to the video generator pipeline.</summary>
    /// <param name="generatorFactory">The generator factory function.</param>
    /// <returns>The updated <see cref="VideoGeneratorBuilder"/> instance.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="generatorFactory"/> is <see langword="null"/>.</exception>
    public VideoGeneratorBuilder Use(Func<IVideoGenerator, IVideoGenerator> generatorFactory)
    {
        _ = Throw.IfNull(generatorFactory);

        // Adapt the single-argument factory to the two-argument shape used internally.
        return Use((inner, _) => generatorFactory(inner));
    }

    /// <summary>Adds a factory for an intermediate video generator to the video generator pipeline.</summary>
    /// <param name="generatorFactory">The generator factory function.</param>
    /// <returns>The updated <see cref="VideoGeneratorBuilder"/> instance.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="generatorFactory"/> is <see langword="null"/>.</exception>
    public VideoGeneratorBuilder Use(Func<IVideoGenerator, IServiceProvider, IVideoGenerator> generatorFactory)
    {
        _ = Throw.IfNull(generatorFactory);

        (_generatorFactories ??= []).Add(generatorFactory);
        return this;
    }
}
+ public static VideoGeneratorBuilder AddVideoGenerator( + this IServiceCollection serviceCollection, + IVideoGenerator innerGenerator, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + => AddVideoGenerator(serviceCollection, _ => innerGenerator, lifetime); + + /// Registers a singleton in the . + /// The to which the generator should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the generator. Defaults to . + /// An that can be used to build a pipeline around the inner generator. + /// or is . + /// The generator is registered as a singleton service. + public static VideoGeneratorBuilder AddVideoGenerator( + this IServiceCollection serviceCollection, + Func innerGeneratorFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + _ = Throw.IfNull(serviceCollection); + _ = Throw.IfNull(innerGeneratorFactory); + + var builder = new VideoGeneratorBuilder(innerGeneratorFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVideoGenerator), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the generator should be added. + /// The key with which to associate the generator. + /// The inner that represents the underlying backend. + /// The service lifetime for the generator. Defaults to . + /// An that can be used to build a pipeline around the inner generator. + /// , , or is . + /// The generator is registered as a scoped service. + public static VideoGeneratorBuilder AddKeyedVideoGenerator( + this IServiceCollection serviceCollection, + object? serviceKey, + IVideoGenerator innerGenerator, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + => AddKeyedVideoGenerator(serviceCollection, serviceKey, _ => innerGenerator, lifetime); + + /// Registers a keyed singleton in the . + /// The to which the generator should be added. + /// The key with which to associate the generator. 
+ /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the generator. Defaults to . + /// An that can be used to build a pipeline around the inner generator. + /// , , or is . + /// The generator is registered as a scoped service. + public static VideoGeneratorBuilder AddKeyedVideoGenerator( + this IServiceCollection serviceCollection, + object? serviceKey, + Func innerGeneratorFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + _ = Throw.IfNull(serviceCollection); + _ = Throw.IfNull(innerGeneratorFactory); + + var builder = new VideoGeneratorBuilder(innerGeneratorFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVideoGenerator), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderVideoGeneratorExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderVideoGeneratorExtensions.cs new file mode 100644 index 00000000000..142441f65ad --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderVideoGeneratorExtensions.cs @@ -0,0 +1,29 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; + +namespace Microsoft.Extensions.AI; + +/// Provides extension methods for working with in the context of . +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public static class VideoGeneratorBuilderVideoGeneratorExtensions +{ + /// Creates a new using as its inner generator. + /// The generator to use as the inner generator. + /// The new instance. + /// is . 
+ /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner generator. + /// + public static VideoGeneratorBuilder AsBuilder(this IVideoGenerator innerGenerator) + { + _ = Throw.IfNull(innerGenerator); + + return new VideoGeneratorBuilder(innerGenerator); + } +} diff --git a/src/Shared/DiagnosticIds/DiagnosticIds.cs b/src/Shared/DiagnosticIds/DiagnosticIds.cs index 94cc1a1f04a..e3e45c13e04 100644 --- a/src/Shared/DiagnosticIds/DiagnosticIds.cs +++ b/src/Shared/DiagnosticIds/DiagnosticIds.cs @@ -49,6 +49,7 @@ internal static class Experiments // All AI experiments share a diagnostic ID but have different // constants to manage which experiment each API belongs to. internal const string AIImageGeneration = AIExperiments; + internal const string AIVideoGeneration = AIExperiments; internal const string AISpeechToText = AIExperiments; internal const string AITextToSpeech = AIExperiments; internal const string AIMcpServers = AIExperiments; @@ -67,6 +68,7 @@ internal static class Experiments internal const string AIOpenAIResponses = "OPENAI001"; internal const string AIOpenAIAssistants = "OPENAI001"; internal const string AIOpenAIImageClient = "OPENAI001"; + internal const string AIOpenAIVideoClient = "OPENAI001"; internal const string AIOpenAIAudio = "OPENAI001"; internal const string AIOpenAIReasoning = "OPENAI001"; internal const string AIOpenAIRealtime = "OPENAI002"; diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerator.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerator.cs new file mode 100644 index 00000000000..9a52ed6c1ed --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerator.cs @@ -0,0 +1,47 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
using System;
using System.Threading;
using System.Threading.Tasks;

namespace Microsoft.Extensions.AI;

/// <summary>A configurable <see cref="IVideoGenerator"/> implementation for use in unit tests.</summary>
public sealed class TestVideoGenerator : IVideoGenerator
{
    public TestVideoGenerator()
    {
        GetServiceCallback = DefaultGetServiceCallback;
    }

    /// <summary>Gets or sets an optional service provider associated with the generator.</summary>
    public IServiceProvider? Services { get; set; }

    /// <summary>Gets or sets the callback invoked by <see cref="GenerateAsync"/>; when null, an empty response is returned.</summary>
    public Func<VideoGenerationRequest, VideoGenerationOptions?, CancellationToken, Task<VideoGenerationResponse>>? GenerateVideosAsyncCallback { get; set; }

    /// <summary>Gets or sets the callback invoked by <see cref="GetService"/>.</summary>
    public Func<Type, object?, object?> GetServiceCallback { get; set; }

    /// <summary>Gets a value indicating whether <see cref="Dispose"/> has been invoked.</summary>
    public bool DisposeInvoked { get; private set; }

    public Task<VideoGenerationResponse> GenerateAsync(
        VideoGenerationRequest request,
        VideoGenerationOptions? options = null,
        IProgress<VideoGenerationProgress>? progress = null,
        CancellationToken cancellationToken = default) =>
        GenerateVideosAsyncCallback?.Invoke(request, options, cancellationToken) ??
        Task.FromResult(new VideoGenerationResponse());

    public object? GetService(Type serviceType, object? serviceKey = null) =>
        GetServiceCallback(serviceType, serviceKey);

    public void Dispose() => DisposeInvoked = true;

    // Default behavior: return this instance when asked for a compatible type with no key; otherwise null.
    private object? DefaultGetServiceCallback(Type serviceType, object? serviceKey) =>
        serviceType is not null && serviceKey is null && serviceType.IsInstanceOfType(this) ? this : null;
}
using System;
using System.Threading.Tasks;
using Xunit;

namespace Microsoft.Extensions.AI;

/// <summary>Tests for <see cref="DelegatingVideoGenerator"/>.</summary>
public class DelegatingVideoGeneratorTests
{
    [Fact]
    public void RequiresInnerVideoGenerator() =>
        Assert.Throws<ArgumentNullException>("innerGenerator", () => new TestDelegatingVideoGenerator(null!));

    [Fact]
    public async Task GenerateVideosAsyncDefaultsToInnerGeneratorAsync()
    {
        var expectedResponse = new VideoGenerationResponse();
        using var innerGenerator = new TestVideoGenerator
        {
            GenerateVideosAsyncCallback = (request, options, ct) => Task.FromResult(expectedResponse)
        };
        using var delegatingGenerator = new TestDelegatingVideoGenerator(innerGenerator);

        var actualResponse = await delegatingGenerator.GenerateAsync(new VideoGenerationRequest("Test"));

        Assert.Same(expectedResponse, actualResponse);
    }

    [Fact]
    public void GetServiceThrowsForNullType()
    {
        using var innerGenerator = new TestVideoGenerator();
        using var delegatingGenerator = new TestDelegatingVideoGenerator(innerGenerator);

        Assert.Throws<ArgumentNullException>("serviceType", () => delegatingGenerator.GetService(null!));
    }

    [Fact]
    public void GetServiceReturnsSelfIfCompatibleWithRequestAndKeyIsNull()
    {
        using var innerGenerator = new TestVideoGenerator();
        using var delegatingGenerator = new TestDelegatingVideoGenerator(innerGenerator);

        Assert.Same(delegatingGenerator, delegatingGenerator.GetService(typeof(DelegatingVideoGenerator)));
        Assert.Same(delegatingGenerator, delegatingGenerator.GetService(typeof(IVideoGenerator)));
    }

    [Fact]
    public void GetServiceDelegatesToInnerIfKeyIsNotNull()
    {
        using var innerGenerator = new TestVideoGenerator
        {
            GetServiceCallback = (type, key) => key is not null ? "inner-result" : null
        };
        using var delegatingGenerator = new TestDelegatingVideoGenerator(innerGenerator);

        Assert.Equal("inner-result", delegatingGenerator.GetService(typeof(string), "someKey"));
    }

    [Fact]
    public void GetServiceDelegatesToInnerIfNotCompatibleWithRequest()
    {
        using var innerGenerator = new TestVideoGenerator
        {
            GetServiceCallback = (type, key) => type == typeof(string) ? "inner-result" : null
        };
        using var delegatingGenerator = new TestDelegatingVideoGenerator(innerGenerator);

        Assert.Equal("inner-result", delegatingGenerator.GetService(typeof(string)));
    }

    [Fact]
    public void Dispose_SetsFlag()
    {
        using var innerGenerator = new TestVideoGenerator();
        var delegatingGenerator = new TestDelegatingVideoGenerator(innerGenerator);

        Assert.False(innerGenerator.DisposeInvoked);
        delegatingGenerator.Dispose();
        Assert.True(innerGenerator.DisposeInvoked);
    }

    [Fact]
    public void Dispose_MultipleCallsSafe()
    {
        using var innerGenerator = new TestVideoGenerator();
        var delegatingGenerator = new TestDelegatingVideoGenerator(innerGenerator);

        delegatingGenerator.Dispose();
        delegatingGenerator.Dispose();

        Assert.True(innerGenerator.DisposeInvoked);
    }

    /// <summary>Minimal concrete subclass exposing the protected base behavior under test.</summary>
    private sealed class TestDelegatingVideoGenerator(IVideoGenerator innerGenerator) : DelegatingVideoGenerator(innerGenerator);
}
+ +using System; +using System.Drawing; +using System.Text.Json; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class VideoGenerationOptionsTests +{ + [Fact] + public void Constructor_Defaults() + { + var options = new VideoGenerationOptions(); + Assert.Null(options.Count); + Assert.Null(options.Duration); + Assert.Null(options.FramesPerSecond); + Assert.Null(options.MediaType); + Assert.Null(options.ModelId); + Assert.Null(options.RawRepresentationFactory); + Assert.Null(options.ResponseFormat); + Assert.Null(options.VideoSize); + Assert.Null(options.AdditionalProperties); + } + + [Fact] + public void Properties_Roundtrip() + { + var options = new VideoGenerationOptions + { + Count = 3, + Duration = TimeSpan.FromSeconds(15), + FramesPerSecond = 30, + MediaType = "video/webm", + ModelId = "sora", + ResponseFormat = VideoGenerationResponseFormat.Data, + VideoSize = new Size(1280, 720), + AdditionalProperties = new() { ["key"] = "value" }, + }; + + Assert.Equal(3, options.Count); + Assert.Equal(TimeSpan.FromSeconds(15), options.Duration); + Assert.Equal(30, options.FramesPerSecond); + Assert.Equal("video/webm", options.MediaType); + Assert.Equal("sora", options.ModelId); + Assert.Equal(VideoGenerationResponseFormat.Data, options.ResponseFormat); + Assert.Equal(new Size(1280, 720), options.VideoSize); + Assert.Equal("value", options.AdditionalProperties["key"]); + } + + [Fact] + public void Clone_CreatesIndependentCopy() + { + var original = new VideoGenerationOptions + { + Count = 2, + Duration = TimeSpan.FromSeconds(5), + FramesPerSecond = 24, + MediaType = "video/mp4", + ModelId = "model-1", + ResponseFormat = VideoGenerationResponseFormat.Uri, + VideoSize = new Size(1920, 1080), + AdditionalProperties = new() { ["key"] = "value" }, + }; + + var clone = original.Clone(); + + Assert.NotSame(original, clone); + Assert.Equal(original.Count, clone.Count); + Assert.Equal(original.Duration, clone.Duration); + Assert.Equal(original.FramesPerSecond, 
clone.FramesPerSecond); + Assert.Equal(original.MediaType, clone.MediaType); + Assert.Equal(original.ModelId, clone.ModelId); + Assert.Equal(original.ResponseFormat, clone.ResponseFormat); + Assert.Equal(original.VideoSize, clone.VideoSize); + Assert.NotSame(original.AdditionalProperties, clone.AdditionalProperties); + } + + [Fact] + public void Clone_FromNull_ReturnsDefaults() + { + var options = new DerivedVideoGenerationOptions(null); + Assert.Null(options.Count); + Assert.Null(options.Duration); + Assert.Null(options.ModelId); + } + + [Theory] + [InlineData(VideoGenerationResponseFormat.Uri)] + [InlineData(VideoGenerationResponseFormat.Data)] + [InlineData(VideoGenerationResponseFormat.Hosted)] + public void ResponseFormat_EnumValues(VideoGenerationResponseFormat format) + { + var options = new VideoGenerationOptions { ResponseFormat = format }; + Assert.Equal(format, options.ResponseFormat); + } + + [Fact] + public void JsonSerialization_Roundtrip() + { + var options = new VideoGenerationOptions + { + Count = 2, + Duration = TimeSpan.FromSeconds(10), + FramesPerSecond = 24, + MediaType = "video/mp4", + ModelId = "test-model", + VideoSize = new Size(640, 480), + ResponseFormat = VideoGenerationResponseFormat.Data, + AdditionalProperties = new() { ["custom"] = "prop" }, + }; + + string json = JsonSerializer.Serialize(options, AIJsonUtilities.DefaultOptions); + var deserialized = JsonSerializer.Deserialize(json, AIJsonUtilities.DefaultOptions); + + Assert.NotNull(deserialized); + Assert.Equal(options.Count, deserialized!.Count); + Assert.Equal(options.MediaType, deserialized.MediaType); + Assert.Equal(options.ModelId, deserialized.ModelId); + Assert.Equal(options.ResponseFormat, deserialized.ResponseFormat); + } + + private class DerivedVideoGenerationOptions : VideoGenerationOptions + { + public DerivedVideoGenerationOptions(VideoGenerationOptions? 
other) + : base(other) + { + } + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationResponseTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationResponseTests.cs new file mode 100644 index 00000000000..d5d51b929b7 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationResponseTests.cs @@ -0,0 +1,79 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; +using System.Text.Json; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class VideoGenerationResponseTests +{ + [Fact] + public void Constructor_Defaults() + { + var response = new VideoGenerationResponse(); + Assert.NotNull(response.Contents); + Assert.Empty(response.Contents); + Assert.Null(response.RawRepresentation); + Assert.Null(response.Usage); + } + + [Fact] + public void Constructor_WithContents() + { + var contents = new List { new DataContent("dGVzdA=="u8.ToArray(), "video/mp4") }; + var response = new VideoGenerationResponse(contents); + Assert.Same(contents, response.Contents); + } + + [Fact] + public void Contents_NullSetter_ReturnsEmptyList() + { + var response = new VideoGenerationResponse { Contents = null! 
}; + Assert.NotNull(response.Contents); + Assert.Empty(response.Contents); + } + + [Fact] + public void RawRepresentation_Roundtrip() + { + var raw = new object(); + var response = new VideoGenerationResponse { RawRepresentation = raw }; + Assert.Same(raw, response.RawRepresentation); + } + + [Fact] + public void Usage_Roundtrip() + { + var usage = new UsageDetails { InputTokenCount = 100, OutputTokenCount = 200 }; + var response = new VideoGenerationResponse { Usage = usage }; + Assert.Same(usage, response.Usage); + Assert.Equal(100, response.Usage.InputTokenCount); + Assert.Equal(200, response.Usage.OutputTokenCount); + } + + [Fact] + public void JsonSerialization_WithUriContent() + { + var response = new VideoGenerationResponse( + [new UriContent("https://example.com/video.mp4", "video/mp4")]); + + string json = JsonSerializer.Serialize(response, AIJsonUtilities.DefaultOptions); + var deserialized = JsonSerializer.Deserialize(json, AIJsonUtilities.DefaultOptions); + + Assert.NotNull(deserialized); + Assert.Single(deserialized!.Contents); + } + + [Fact] + public void JsonSerialization_EmptyResponse() + { + var response = new VideoGenerationResponse(); + string json = JsonSerializer.Serialize(response, AIJsonUtilities.DefaultOptions); + var deserialized = JsonSerializer.Deserialize(json, AIJsonUtilities.DefaultOptions); + + Assert.NotNull(deserialized); + Assert.Empty(deserialized!.Contents); + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs new file mode 100644 index 00000000000..1298346b27c --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs @@ -0,0 +1,167 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System; +using System.Threading.Tasks; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class VideoGeneratorExtensionsTests +{ + [Fact] + public void GetService_Generic_NullGenerator_Throws() + { + Assert.Throws("generator", () => ((IVideoGenerator)null!).GetService()); + } + + [Fact] + public void GetService_Generic_ReturnsService() + { + using var generator = new TestVideoGenerator(); + var result = generator.GetService(); + Assert.Same(generator, result); + } + + [Fact] + public void GetRequiredService_NullGenerator_Throws() + { + Assert.Throws("generator", () => ((IVideoGenerator)null!).GetRequiredService(typeof(IVideoGenerator))); + } + + [Fact] + public void GetRequiredService_NullType_Throws() + { + using var generator = new TestVideoGenerator(); + Assert.Throws("serviceType", () => generator.GetRequiredService(null!)); + } + + [Fact] + public void GetRequiredService_ServiceNotAvailable_Throws() + { + using var generator = new TestVideoGenerator(); + Assert.Throws(() => generator.GetRequiredService(typeof(string))); + } + + [Fact] + public void GetRequiredService_Generic_ServiceNotAvailable_Throws() + { + using var generator = new TestVideoGenerator(); + Assert.Throws(() => generator.GetRequiredService()); + } + + [Fact] + public async Task GenerateVideosAsync_NullGenerator_Throws() + { + await Assert.ThrowsAsync("generator", () => + ((IVideoGenerator)null!).GenerateVideosAsync("Test")); + } + + [Fact] + public async Task GenerateVideosAsync_NullPrompt_Throws() + { + using var generator = new TestVideoGenerator(); + await Assert.ThrowsAsync("prompt", () => + generator.GenerateVideosAsync(null!)); + } + + [Fact] + public async Task GenerateVideosAsync_CallsGenerateAsync() + { + VideoGenerationRequest? 
capturedRequest = null; + using var generator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, options, ct) => + { + capturedRequest = request; + return Task.FromResult(new VideoGenerationResponse()); + } + }; + + await generator.GenerateVideosAsync("A cat video"); + + Assert.NotNull(capturedRequest); + Assert.Equal("A cat video", capturedRequest!.Prompt); + Assert.Null(capturedRequest.OriginalMedia); + } + + [Fact] + public async Task EditVideosAsync_NullGenerator_Throws() + { + await Assert.ThrowsAsync("generator", () => + ((IVideoGenerator)null!).EditVideosAsync([], "prompt")); + } + + [Fact] + public async Task EditVideosAsync_NullOriginalMedia_Throws() + { + using var generator = new TestVideoGenerator(); + await Assert.ThrowsAsync("originalMedia", () => + generator.EditVideosAsync(null!, "prompt")); + } + + [Fact] + public async Task EditVideosAsync_NullPrompt_Throws() + { + using var generator = new TestVideoGenerator(); + await Assert.ThrowsAsync("prompt", () => + generator.EditVideosAsync([], null!)); + } + + [Fact] + public async Task EditVideoAsync_DataContent_CallsGenerateAsync() + { + VideoGenerationRequest? capturedRequest = null; + using var generator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, options, ct) => + { + capturedRequest = request; + return Task.FromResult(new VideoGenerationResponse()); + } + }; + + var originalVideo = new DataContent("dGVzdA=="u8.ToArray(), "video/mp4"); + await generator.EditVideoAsync(originalVideo, "Make it faster"); + + Assert.NotNull(capturedRequest); + Assert.Equal("Make it faster", capturedRequest!.Prompt); + Assert.NotNull(capturedRequest.OriginalMedia); + } + + [Fact] + public async Task EditVideoAsync_ByteArray_CallsGenerateAsync() + { + VideoGenerationRequest? 
capturedRequest = null; + using var generator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, options, ct) => + { + capturedRequest = request; + return Task.FromResult(new VideoGenerationResponse()); + } + }; + + await generator.EditVideoAsync(new byte[] { 1, 2, 3, 4 }, "test.mp4", "Add effects"); + + Assert.NotNull(capturedRequest); + Assert.Equal("Add effects", capturedRequest!.Prompt); + Assert.NotNull(capturedRequest.OriginalMedia); + } + + [Fact] + public async Task EditVideoAsync_ByteArray_NullFileName_Throws() + { + using var generator = new TestVideoGenerator(); + await Assert.ThrowsAsync("fileName", () => + generator.EditVideoAsync(new byte[] { 1 }, null!, "prompt")); + } + + [Fact] + public async Task EditVideoAsync_ByteArray_NullPrompt_Throws() + { + using var generator = new TestVideoGenerator(); + await Assert.ThrowsAsync("prompt", () => + generator.EditVideoAsync(new byte[] { 1 }, "test.mp4", null!)); + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorMetadataTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorMetadataTests.cs new file mode 100644 index 00000000000..526d2b38560 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorMetadataTests.cs @@ -0,0 +1,29 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class VideoGeneratorMetadataTests +{ + [Fact] + public void Constructor_NullValues() + { + var metadata = new VideoGeneratorMetadata(); + Assert.Null(metadata.ProviderName); + Assert.Null(metadata.ProviderUri); + Assert.Null(metadata.DefaultModelId); + } + + [Fact] + public void Constructor_WithValues() + { + var uri = new Uri("https://api.example.com/v1"); + var metadata = new VideoGeneratorMetadata("test-provider", uri, "sora"); + Assert.Equal("test-provider", metadata.ProviderName); + Assert.Equal(uri, metadata.ProviderUri); + Assert.Equal("sora", metadata.DefaultModelId); + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs new file mode 100644 index 00000000000..1af60e0ab55 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs @@ -0,0 +1,134 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System; +using System.Drawing; +using System.Threading.Tasks; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class VideoGeneratorTests +{ + [Fact] + public void GetService_WithServiceKey_ReturnsNull() + { + using var generator = new TestVideoGenerator(); + Assert.Null(generator.GetService(typeof(IVideoGenerator), "key")); + } + + [Fact] + public void GetService_WithoutServiceKey_CallsCallback() + { + using var generator = new TestVideoGenerator(); + var result = generator.GetService(typeof(IVideoGenerator)); + Assert.Same(generator, result); + } + + [Fact] + public async Task GenerateVideosAsync_CallsCallback() + { + var expectedRequest = new VideoGenerationRequest("Test prompt"); + var expectedResponse = new VideoGenerationResponse(); + + using var generator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, options, ct) => + { + Assert.Same(expectedRequest, request); + return Task.FromResult(expectedResponse); + } + }; + + var result = await generator.GenerateAsync(expectedRequest); + Assert.Same(expectedResponse, result); + } + + [Fact] + public async Task GenerateVideosAsync_NoCallback_ReturnsEmptyResponse() + { + using var generator = new TestVideoGenerator(); + var result = await generator.GenerateAsync(new VideoGenerationRequest("Test")); + Assert.NotNull(result); + Assert.Empty(result.Contents); + } + + [Fact] + public void Dispose_SetsFlag() + { + var generator = new TestVideoGenerator(); + Assert.False(generator.DisposeInvoked); + generator.Dispose(); + Assert.True(generator.DisposeInvoked); + } + + [Fact] + public void Dispose_MultipleCallsSafe() + { + var generator = new TestVideoGenerator(); + generator.Dispose(); + generator.Dispose(); // Should not throw + Assert.True(generator.DisposeInvoked); + } + + [Fact] + public async Task GenerateVideosAsync_WithOptions_PassesThroughCorrectly() + { + var options = new VideoGenerationOptions + { + Count = 2, + VideoSize = new Size(1920, 1080), + MediaType = 
"video/mp4", + ModelId = "sora", + Duration = TimeSpan.FromSeconds(10), + FramesPerSecond = 24, + ResponseFormat = VideoGenerationResponseFormat.Data + }; + + VideoGenerationOptions? capturedOptions = null; + + using var generator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, opts, ct) => + { + capturedOptions = opts; + return Task.FromResult(new VideoGenerationResponse()); + } + }; + + await generator.GenerateAsync(new VideoGenerationRequest("Test"), options); + + Assert.NotNull(capturedOptions); + Assert.Equal(2, capturedOptions!.Count); + Assert.Equal(new Size(1920, 1080), capturedOptions.VideoSize); + Assert.Equal("video/mp4", capturedOptions.MediaType); + Assert.Equal("sora", capturedOptions.ModelId); + Assert.Equal(TimeSpan.FromSeconds(10), capturedOptions.Duration); + Assert.Equal(24, capturedOptions.FramesPerSecond); + Assert.Equal(VideoGenerationResponseFormat.Data, capturedOptions.ResponseFormat); + } + + [Fact] + public async Task GenerateVideosAsync_WithEditRequest_PassesThroughCorrectly() + { + var originalVideos = new AIContent[] { new DataContent("dGVzdA=="u8.ToArray(), "video/mp4") }; + var request = new VideoGenerationRequest("Edit this", originalVideos); + + VideoGenerationRequest? 
capturedRequest = null; + + using var generator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (req, opts, ct) => + { + capturedRequest = req; + return Task.FromResult(new VideoGenerationResponse()); + } + }; + + await generator.GenerateAsync(request); + + Assert.NotNull(capturedRequest); + Assert.Equal("Edit this", capturedRequest!.Prompt); + Assert.NotNull(capturedRequest.OriginalMedia); + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VideoGeneratorIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VideoGeneratorIntegrationTests.cs new file mode 100644 index 00000000000..dc9829bf7e0 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VideoGeneratorIntegrationTests.cs @@ -0,0 +1,99 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Threading.Tasks; +using Microsoft.TestUtilities; +using Xunit; + +#pragma warning disable CA2214 // Do not call overridable methods in constructors + +namespace Microsoft.Extensions.AI; + +public abstract class VideoGeneratorIntegrationTests : IDisposable +{ + private readonly IVideoGenerator? _generator; + + protected VideoGeneratorIntegrationTests() + { + _generator = CreateGenerator(); + } + + public void Dispose() + { + _generator?.Dispose(); + GC.SuppressFinalize(this); + } + + protected abstract IVideoGenerator? 
CreateGenerator(); + + [ConditionalFact] + public virtual async Task GenerateVideosAsync_SingleVideoGeneration() + { + SkipIfNotEnabled(); + + var options = new VideoGenerationOptions + { + Count = 1 + }; + + var response = await _generator.GenerateVideosAsync("A simple animation of a bouncing ball", options); + + Assert.NotNull(response); + Assert.NotEmpty(response.Contents); + + var content = Assert.Single(response.Contents); + switch (content) + { + case UriContent uc: + Assert.StartsWith("http", uc.Uri.Scheme, StringComparison.Ordinal); + break; + + case DataContent dc: + Assert.False(dc.Data.IsEmpty); + Assert.StartsWith("video/", dc.MediaType, StringComparison.Ordinal); + break; + + default: + Assert.Fail($"Unexpected content type: {content.GetType()}"); + break; + } + } + + [ConditionalFact] + public virtual async Task GenerateVideosAsync_MultipleVideos() + { + SkipIfNotEnabled(); + + var options = new VideoGenerationOptions + { + Count = 2 + }; + + var response = await _generator.GenerateVideosAsync("A cat sitting on a table", options); + + Assert.NotNull(response); + Assert.NotEmpty(response.Contents); + Assert.Equal(2, response.Contents.Count); + + foreach (var content in response.Contents) + { + Assert.IsType(content); + var dataContent = (DataContent)content; + Assert.False(dataContent.Data.IsEmpty); + Assert.StartsWith("video/", dataContent.MediaType, StringComparison.Ordinal); + } + } + + [MemberNotNull(nameof(_generator))] + protected void SkipIfNotEnabled() + { + string? 
skipIntegration = TestRunnerConfiguration.Instance["SkipIntegrationTests"]; + + if (skipIntegration is not null || _generator is null) + { + throw new SkipTestException("Generator is not enabled."); + } + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorIntegrationTests.cs new file mode 100644 index 00000000000..c00043a2edc --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorIntegrationTests.cs @@ -0,0 +1,14 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +#pragma warning disable OPENAI001 // Experimental OpenAI APIs + +namespace Microsoft.Extensions.AI; + +public class OpenAIVideoGeneratorIntegrationTests : VideoGeneratorIntegrationTests +{ + protected override IVideoGenerator? CreateGenerator() + => IntegrationTestHelpers.GetOpenAIClient()? + .GetVideoClient() + .AsIVideoGenerator(TestRunnerConfiguration.Instance["OpenAI:VideoModel"] ?? "sora"); +} diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs new file mode 100644 index 00000000000..bca109c1c96 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs @@ -0,0 +1,47 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +#pragma warning disable OPENAI001 // Experimental OpenAI APIs + +using System; +using System.ClientModel; +using OpenAI; +using OpenAI.Videos; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class OpenAIVideoGeneratorTests +{ + [Fact] + public void AsIVideoGenerator_InvalidArgs_Throws() + { + Assert.Throws("videoClient", () => ((VideoClient)null!).AsIVideoGenerator()); + } + + [Fact] + public void AsIVideoGenerator_OpenAIClient_ProducesExpectedMetadata() + { + Uri endpoint = new("http://localhost/some/endpoint"); + string model = "sora"; + + var client = new OpenAIClient(new ApiKeyCredential("key"), new OpenAIClientOptions { Endpoint = endpoint }); + + IVideoGenerator videoGenerator = client.GetVideoClient().AsIVideoGenerator(model); + var metadata = videoGenerator.GetService(); + Assert.Equal(endpoint, metadata?.ProviderUri); + Assert.Equal(model, metadata?.DefaultModelId); + } + + [Fact] + public void GetService_ReturnsExpectedServices() + { + var client = new OpenAIClient(new ApiKeyCredential("key")); + IVideoGenerator videoGenerator = client.GetVideoClient().AsIVideoGenerator("sora"); + + Assert.Same(videoGenerator, videoGenerator.GetService()); + Assert.Same(videoGenerator, videoGenerator.GetService()); + Assert.NotNull(videoGenerator.GetService()); + Assert.NotNull(videoGenerator.GetService()); + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Microsoft.Extensions.AI.Tests.csproj b/test/Libraries/Microsoft.Extensions.AI.Tests/Microsoft.Extensions.AI.Tests.csproj index c92459ef493..eada870401c 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/Microsoft.Extensions.AI.Tests.csproj +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Microsoft.Extensions.AI.Tests.csproj @@ -25,6 +25,7 @@ + diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/ConfigureOptionsVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/ConfigureOptionsVideoGeneratorTests.cs new file mode 100644 index 
00000000000..8ac11f557b9 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/ConfigureOptionsVideoGeneratorTests.cs @@ -0,0 +1,72 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Threading.Tasks; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class ConfigureOptionsVideoGeneratorTests +{ + [Fact] + public void InvalidArgs_Throws() + { + using var generator = new TestVideoGenerator(); + Assert.Throws("innerGenerator", () => new ConfigureOptionsVideoGenerator(null!, _ => { })); + Assert.Throws("configure", () => new ConfigureOptionsVideoGenerator(generator, null!)); + } + + [Fact] + public async Task ConfigureCallback_ReceivesClonedOptions() + { + var originalOptions = new VideoGenerationOptions { ModelId = "original-model" }; + VideoGenerationOptions? capturedOptions = null; + + using var inner = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, options, ct) => + { + capturedOptions = options; + return Task.FromResult(new VideoGenerationResponse()); + } + }; + + using var configured = new ConfigureOptionsVideoGenerator(inner, opts => + { + opts.ModelId = "configured-model"; + }); + + await configured.GenerateAsync(new VideoGenerationRequest("Test"), originalOptions); + + Assert.NotNull(capturedOptions); + Assert.NotSame(originalOptions, capturedOptions); + Assert.Equal("configured-model", capturedOptions!.ModelId); + Assert.Equal("original-model", originalOptions.ModelId); // Original unchanged + } + + [Fact] + public async Task ConfigureCallback_WithNullOptions_CreatesNewInstance() + { + VideoGenerationOptions? 
capturedOptions = null; + + using var inner = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, options, ct) => + { + capturedOptions = options; + return Task.FromResult(new VideoGenerationResponse()); + } + }; + + using var configured = new ConfigureOptionsVideoGenerator(inner, opts => + { + opts.ModelId = "new-model"; + }); + + await configured.GenerateAsync(new VideoGenerationRequest("Test"), null); + + Assert.NotNull(capturedOptions); + Assert.Equal("new-model", capturedOptions!.ModelId); + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs new file mode 100644 index 00000000000..4192e7dc3f7 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs @@ -0,0 +1,142 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System; +using System.Threading.Tasks; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Logging.Testing; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class LoggingVideoGeneratorTests +{ + [Fact] + public void LoggingVideoGenerator_InvalidArgs_Throws() + { + Assert.Throws("innerGenerator", () => new LoggingVideoGenerator(null!, NullLogger.Instance)); + Assert.Throws("logger", () => new LoggingVideoGenerator(new TestVideoGenerator(), null!)); + } + + [Fact] + public void UseLogging_AvoidsInjectingNopGenerator() + { + using var innerGenerator = new TestVideoGenerator(); + + Assert.Null(innerGenerator.AsBuilder().UseLogging(NullLoggerFactory.Instance).Build().GetService(typeof(LoggingVideoGenerator))); + Assert.Same(innerGenerator, innerGenerator.AsBuilder().UseLogging(NullLoggerFactory.Instance).Build().GetService(typeof(IVideoGenerator))); + + using var factory = LoggerFactory.Create(b => b.AddFakeLogging()); + Assert.NotNull(innerGenerator.AsBuilder().UseLogging(factory).Build().GetService(typeof(LoggingVideoGenerator))); + + ServiceCollection c = new(); + c.AddFakeLogging(); + var services = c.BuildServiceProvider(); + Assert.NotNull(innerGenerator.AsBuilder().UseLogging().Build(services).GetService(typeof(LoggingVideoGenerator))); + Assert.NotNull(innerGenerator.AsBuilder().UseLogging(null).Build(services).GetService(typeof(LoggingVideoGenerator))); + Assert.Null(innerGenerator.AsBuilder().UseLogging(NullLoggerFactory.Instance).Build(services).GetService(typeof(LoggingVideoGenerator))); + } + + [Theory] + [InlineData(LogLevel.Trace)] + [InlineData(LogLevel.Debug)] + [InlineData(LogLevel.Information)] + public async Task GenerateVideosAsync_LogsInvocationAndCompletion(LogLevel level) + { + var collector = new FakeLogCollector(); + + ServiceCollection c = new(); + c.AddLogging(b => 
b.AddProvider(new FakeLoggerProvider(collector)).SetMinimumLevel(level)); + var services = c.BuildServiceProvider(); + + using IVideoGenerator innerGenerator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, options, cancellationToken) => + { + return Task.FromResult(new VideoGenerationResponse()); + }, + }; + + using IVideoGenerator generator = innerGenerator + .AsBuilder() + .UseLogging() + .Build(services); + + await generator.GenerateAsync( + new VideoGenerationRequest("A beautiful sunset"), + new VideoGenerationOptions { ModelId = "sora" }); + + var logs = collector.GetSnapshot(); + if (level is LogLevel.Trace) + { + Assert.Collection(logs, + entry => Assert.True( + entry.Message.Contains($"{nameof(IVideoGenerator.GenerateAsync)} invoked:") && + entry.Message.Contains("A beautiful sunset") && + entry.Message.Contains("sora")), + entry => Assert.Contains($"{nameof(IVideoGenerator.GenerateAsync)} completed:", entry.Message)); + } + else if (level is LogLevel.Debug) + { + Assert.Collection(logs, + entry => Assert.True(entry.Message.Contains($"{nameof(IVideoGenerator.GenerateAsync)} invoked.") && !entry.Message.Contains("A beautiful sunset")), + entry => Assert.True(entry.Message.Contains($"{nameof(IVideoGenerator.GenerateAsync)} completed.") && !entry.Message.Contains("sora"))); + } + else + { + Assert.Empty(logs); + } + } + + [Theory] + [InlineData(LogLevel.Trace)] + [InlineData(LogLevel.Debug)] + [InlineData(LogLevel.Information)] + public async Task GenerateVideosAsync_WithOriginalMedia_LogsInvocationAndCompletion(LogLevel level) + { + var collector = new FakeLogCollector(); + using ILoggerFactory loggerFactory = LoggerFactory.Create(b => b.AddProvider(new FakeLoggerProvider(collector)).SetMinimumLevel(level)); + + using IVideoGenerator innerGenerator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, options, cancellationToken) => + { + return Task.FromResult(new VideoGenerationResponse()); + } + }; + + using 
IVideoGenerator generator = innerGenerator + .AsBuilder() + .UseLogging(loggerFactory) + .Build(); + + AIContent[] originalMedia = [new DataContent((byte[])[1, 2, 3, 4], "video/mp4")]; + await generator.GenerateAsync( + new VideoGenerationRequest("Make it more colorful", originalMedia), + new VideoGenerationOptions { ModelId = "sora" }); + + var logs = collector.GetSnapshot(); + if (level is LogLevel.Trace) + { + Assert.Collection(logs, + entry => Assert.True( + entry.Message.Contains($"{nameof(IVideoGenerator.GenerateAsync)} invoked:") && + entry.Message.Contains("Make it more colorful") && + entry.Message.Contains("sora")), + entry => Assert.Contains($"{nameof(IVideoGenerator.GenerateAsync)} completed", entry.Message)); + } + else if (level is LogLevel.Debug) + { + Assert.Collection(logs, + entry => Assert.True(entry.Message.Contains($"{nameof(IVideoGenerator.GenerateAsync)} invoked.") && !entry.Message.Contains("Make it more colorful")), + entry => Assert.True(entry.Message.Contains($"{nameof(IVideoGenerator.GenerateAsync)} completed.") && !entry.Message.Contains("sora"))); + } + else + { + Assert.Empty(logs); + } + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs new file mode 100644 index 00000000000..951489f157f --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs @@ -0,0 +1,218 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Testing; +using OpenTelemetry.Trace; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class OpenTelemetryVideoGeneratorTests +{ + [Fact] + public void InvalidArgs_Throws() + { + Assert.Throws("innerGenerator", () => new OpenTelemetryVideoGenerator(null!)); + } + + [Theory] + [InlineData(false)] + [InlineData(true)] + public async Task ExpectedInformationLogged_Async(bool enableSensitiveData) + { + var sourceName = Guid.NewGuid().ToString(); + var activities = new List(); + using var tracerProvider = OpenTelemetry.Sdk.CreateTracerProviderBuilder() + .AddSource(sourceName) + .AddInMemoryExporter(activities) + .Build(); + + using var innerGenerator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = async (request, options, cancellationToken) => + { + await Task.Yield(); + + return new() + { + Contents = + [ + new UriContent("http://example/output.mp4", "video/mp4"), + new DataContent(new byte[] { 1, 2, 3, 4 }, "video/mp4") { Name = "moreOutput.mp4" }, + ], + + Usage = new() + { + InputTokenCount = 10, + OutputTokenCount = 20, + TotalTokenCount = 30, + }, + }; + }, + + GetServiceCallback = (serviceType, serviceKey) => + serviceType == typeof(VideoGeneratorMetadata) ? 
new VideoGeneratorMetadata("testservice", new Uri("http://localhost:12345/something"), "amazingmodel") : + null, + }; + + using var g = innerGenerator + .AsBuilder() + .UseOpenTelemetry(null, sourceName, configure: instance => + { + instance.EnableSensitiveData = enableSensitiveData; + }) + .Build(); + + VideoGenerationRequest request = new() + { + Prompt = "This is the input prompt.", + OriginalMedia = [new UriContent("http://example/input.mp4", "video/mp4")], + }; + + VideoGenerationOptions options = new() + { + Count = 2, + VideoSize = new(1920, 1080), + Duration = TimeSpan.FromSeconds(10), + FramesPerSecond = 24, + MediaType = "video/mp4", + ModelId = "mycoolvideomodel", + AdditionalProperties = new() + { + ["service_tier"] = "value1", + ["SomethingElse"] = "value2", + }, + }; + + await g.GenerateAsync(request, options); + + var activity = Assert.Single(activities); + + Assert.NotNull(activity.Id); + Assert.NotEmpty(activity.Id); + + Assert.Equal("localhost", activity.GetTagItem("server.address")); + Assert.Equal(12345, (int)activity.GetTagItem("server.port")!); + + Assert.Equal("generate_content mycoolvideomodel", activity.DisplayName); + Assert.Equal("testservice", activity.GetTagItem("gen_ai.provider.name")); + + Assert.Equal("mycoolvideomodel", activity.GetTagItem("gen_ai.request.model")); + Assert.Equal(2, activity.GetTagItem("gen_ai.request.choice.count")); + Assert.Equal(1920, activity.GetTagItem("gen_ai.request.video.width")); + Assert.Equal(1080, activity.GetTagItem("gen_ai.request.video.height")); + Assert.Equal(10.0, activity.GetTagItem("gen_ai.request.video.duration")); + Assert.Equal(24, activity.GetTagItem("gen_ai.request.video.fps")); + Assert.Equal(enableSensitiveData ? "value1" : null, activity.GetTagItem("service_tier")); + Assert.Equal(enableSensitiveData ? 
"value2" : null, activity.GetTagItem("SomethingElse")); + + Assert.Equal(10, activity.GetTagItem("gen_ai.usage.input_tokens")); + Assert.Equal(20, activity.GetTagItem("gen_ai.usage.output_tokens")); + + Assert.True(activity.Duration.TotalMilliseconds > 0); + + var tags = activity.Tags.ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + if (enableSensitiveData) + { + Assert.Equal(ReplaceWhitespace(""" + [ + { + "role": "user", + "parts": [ + { + "type": "text", + "content": "This is the input prompt." + }, + { + "type": "uri", + "uri": "http://example/input.mp4", + "mime_type": "video/mp4", + "modality": "video" + } + ] + } + ] + """), ReplaceWhitespace(tags["gen_ai.input.messages"])); + + Assert.Equal(ReplaceWhitespace(""" + [ + { + "role": "assistant", + "parts": [ + { + "type": "uri", + "uri": "http://example/output.mp4", + "mime_type": "video/mp4", + "modality": "video" + }, + { + "type": "blob", + "content": "AQIDBA==", + "mime_type": "video/mp4", + "modality": "video" + } + ] + } + ] + """), ReplaceWhitespace(tags["gen_ai.output.messages"])); + } + else + { + Assert.False(tags.ContainsKey("gen_ai.input.messages")); + Assert.False(tags.ContainsKey("gen_ai.output.messages")); + } + + static string ReplaceWhitespace(string? input) => Regex.Replace(input ?? 
"", @"\s+", " ").Trim(); + } + + [Fact] + public async Task ExceptionLogged_Async() + { + var sourceName = Guid.NewGuid().ToString(); + var activities = new List(); + using var tracerProvider = OpenTelemetry.Sdk.CreateTracerProviderBuilder() + .AddSource(sourceName) + .AddInMemoryExporter(activities) + .Build(); + + var collector = new FakeLogCollector(); + using var loggerFactory = LoggerFactory.Create(b => b.AddProvider(new FakeLoggerProvider(collector))); + + var expectedException = new InvalidOperationException("test exception message"); + + using var innerGenerator = new TestVideoGenerator + { + GenerateVideosAsyncCallback = (request, options, cancellationToken) => throw expectedException, + GetServiceCallback = (serviceType, serviceKey) => + serviceType == typeof(VideoGeneratorMetadata) ? new VideoGeneratorMetadata("testservice", new Uri("http://localhost:12345"), "testmodel") : + null, + }; + + using var g = innerGenerator + .AsBuilder() + .UseOpenTelemetry(loggerFactory, sourceName) + .Build(); + + await Assert.ThrowsAsync(() => + g.GenerateAsync(new VideoGenerationRequest { Prompt = "a cat video" })); + + var activity = Assert.Single(activities); + + // Existing error behavior is preserved + Assert.Equal(expectedException.GetType().FullName, activity.GetTagItem("error.type")); + Assert.Equal(ActivityStatusCode.Error, activity.Status); + + // Exception is logged via ILogger + var logEntry = Assert.Single(collector.GetSnapshot()); + Assert.Equal("gen_ai.client.operation.exception", logEntry.Id.Name); + Assert.Equal(LogLevel.Warning, logEntry.Level); + Assert.Same(expectedException, logEntry.Exception); + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/SingletonVideoGeneratorExtensions.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/SingletonVideoGeneratorExtensions.cs new file mode 100644 index 00000000000..93f2905e73a --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/SingletonVideoGeneratorExtensions.cs @@ 
-0,0 +1,11 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +namespace Microsoft.Extensions.AI; + +public static class SingletonVideoGeneratorExtensions +{ + public static VideoGeneratorBuilder UseSingletonMiddleware(this VideoGeneratorBuilder builder) + => builder.Use((inner, services) + => new VideoGeneratorDependencyInjectionPatterns.SingletonMiddleware(inner, services)); +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/VideoGeneratorBuilderTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/VideoGeneratorBuilderTests.cs new file mode 100644 index 00000000000..dbc4608e3c7 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/VideoGeneratorBuilderTests.cs @@ -0,0 +1,103 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class VideoGeneratorBuilderTests +{ + [Fact] + public void PassesServiceProviderToFactories() + { + var expectedServiceProvider = new ServiceCollection().BuildServiceProvider(); + using TestVideoGenerator expectedInnerGenerator = new(); + using TestVideoGenerator expectedOuterGenerator = new(); + + var builder = new VideoGeneratorBuilder(services => + { + Assert.Same(expectedServiceProvider, services); + return expectedInnerGenerator; + }); + + builder.Use((innerGenerator, serviceProvider) => + { + Assert.Same(expectedServiceProvider, serviceProvider); + Assert.Same(expectedInnerGenerator, innerGenerator); + return expectedOuterGenerator; + }); + + Assert.Same(expectedOuterGenerator, builder.Build(expectedServiceProvider)); + } + + [Fact] + public void BuildsPipelineInOrderAdded() + { + using TestVideoGenerator expectedInnerGenerator = new(); + var builder = new 
VideoGeneratorBuilder(expectedInnerGenerator); + + builder.Use(next => new InnerGeneratorCapturingVideoGenerator("First", next)); + builder.Use(next => new InnerGeneratorCapturingVideoGenerator("Second", next)); + builder.Use(next => new InnerGeneratorCapturingVideoGenerator("Third", next)); + + var first = (InnerGeneratorCapturingVideoGenerator)builder.Build(); + + Assert.Equal("First", first.Name); + var second = (InnerGeneratorCapturingVideoGenerator)first.InnerGenerator; + Assert.Equal("Second", second.Name); + var third = (InnerGeneratorCapturingVideoGenerator)second.InnerGenerator; + Assert.Equal("Third", third.Name); + Assert.Same(expectedInnerGenerator, third.InnerGenerator); + } + + [Fact] + public void DoesNotAcceptNullInnerService() + { + Assert.Throws("innerGenerator", () => new VideoGeneratorBuilder((IVideoGenerator)null!)); + Assert.Throws("innerGenerator", () => ((IVideoGenerator)null!).AsBuilder()); + } + + [Fact] + public void DoesNotAcceptNullFactories() + { + Assert.Throws("innerGeneratorFactory", () => new VideoGeneratorBuilder((Func)null!)); + } + + [Fact] + public void DoesNotAllowFactoriesToReturnNull() + { + using var innerGenerator = new TestVideoGenerator(); + VideoGeneratorBuilder builder = new(innerGenerator); + builder.Use(_ => null!); + var ex = Assert.Throws(() => builder.Build()); + Assert.Contains("entry at index 0", ex.Message); + } + + [Fact] + public void UsesEmptyServiceProviderWhenNoServicesProvided() + { + using var innerGenerator = new TestVideoGenerator(); + VideoGeneratorBuilder builder = new(innerGenerator); + builder.Use((innerGenerator, serviceProvider) => + { + Assert.Null(serviceProvider.GetService(typeof(object))); + + var keyedServiceProvider = Assert.IsAssignableFrom(serviceProvider); + Assert.Null(keyedServiceProvider.GetKeyedService(typeof(object), "key")); + Assert.Throws(() => keyedServiceProvider.GetRequiredKeyedService(typeof(object), "key")); + + return innerGenerator; + }); + builder.Build(); + } + + private 
sealed class InnerGeneratorCapturingVideoGenerator(string name, IVideoGenerator innerGenerator) : DelegatingVideoGenerator(innerGenerator) + { +#pragma warning disable S3604 // False positive: Member initializer values should not be redundant + public string Name { get; } = name; +#pragma warning restore S3604 + public new IVideoGenerator InnerGenerator => base.InnerGenerator; + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/VideoGeneratorDependencyInjectionPatterns.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/VideoGeneratorDependencyInjectionPatterns.cs new file mode 100644 index 00000000000..da9fe96150b --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/VideoGeneratorDependencyInjectionPatterns.cs @@ -0,0 +1,178 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Xunit; + +namespace Microsoft.Extensions.AI; + +public class VideoGeneratorDependencyInjectionPatterns +{ + private IServiceCollection ServiceCollection { get; } = new ServiceCollection(); + + [Fact] + public void CanRegisterSingletonUsingFactory() + { + // Arrange/Act + ServiceCollection.AddVideoGenerator(services => new TestVideoGenerator { Services = services }) + .UseSingletonMiddleware(); + + // Assert + var services = ServiceCollection.BuildServiceProvider(); + using var scope1 = services.CreateScope(); + using var scope2 = services.CreateScope(); + + var instance1 = scope1.ServiceProvider.GetRequiredService(); + var instance1Copy = scope1.ServiceProvider.GetRequiredService(); + var instance2 = scope2.ServiceProvider.GetRequiredService(); + + // Each scope gets the same instance, because it's singleton + var instance = Assert.IsType(instance1); + Assert.Same(instance, instance1Copy); + Assert.Same(instance, instance2); + Assert.IsType(instance.InnerGenerator); + } + + [Fact] + public 
void CanRegisterSingletonUsingSharedInstance() + { + // Arrange/Act + using var singleton = new TestVideoGenerator(); + ServiceCollection.AddVideoGenerator(singleton) + .UseSingletonMiddleware(); + + // Assert + var services = ServiceCollection.BuildServiceProvider(); + using var scope1 = services.CreateScope(); + using var scope2 = services.CreateScope(); + + var instance1 = scope1.ServiceProvider.GetRequiredService(); + var instance1Copy = scope1.ServiceProvider.GetRequiredService(); + var instance2 = scope2.ServiceProvider.GetRequiredService(); + + // Each scope gets the same instance, because it's singleton + var instance = Assert.IsType(instance1); + Assert.Same(instance, instance1Copy); + Assert.Same(instance, instance2); + Assert.IsType(instance.InnerGenerator); + } + + [Fact] + public void CanRegisterKeyedSingletonUsingFactory() + { + // Arrange/Act + ServiceCollection.AddKeyedVideoGenerator("mykey", services => new TestVideoGenerator { Services = services }) + .UseSingletonMiddleware(); + + // Assert + var services = ServiceCollection.BuildServiceProvider(); + using var scope1 = services.CreateScope(); + using var scope2 = services.CreateScope(); + + Assert.Null(services.GetService()); + + var instance1 = scope1.ServiceProvider.GetRequiredKeyedService("mykey"); + var instance1Copy = scope1.ServiceProvider.GetRequiredKeyedService("mykey"); + var instance2 = scope2.ServiceProvider.GetRequiredKeyedService("mykey"); + + // Each scope gets the same instance, because it's singleton + var instance = Assert.IsType(instance1); + Assert.Same(instance, instance1Copy); + Assert.Same(instance, instance2); + Assert.IsType(instance.InnerGenerator); + } + + [Fact] + public void CanRegisterKeyedSingletonUsingSharedInstance() + { + // Arrange/Act + using var singleton = new TestVideoGenerator(); + ServiceCollection.AddKeyedVideoGenerator("mykey", singleton) + .UseSingletonMiddleware(); + + // Assert + var services = ServiceCollection.BuildServiceProvider(); + using var 
scope1 = services.CreateScope(); + using var scope2 = services.CreateScope(); + + Assert.Null(services.GetService()); + + var instance1 = scope1.ServiceProvider.GetRequiredKeyedService("mykey"); + var instance1Copy = scope1.ServiceProvider.GetRequiredKeyedService("mykey"); + var instance2 = scope2.ServiceProvider.GetRequiredKeyedService("mykey"); + + // Each scope gets the same instance, because it's singleton + var instance = Assert.IsType(instance1); + Assert.Same(instance, instance1Copy); + Assert.Same(instance, instance2); + Assert.IsType(instance.InnerGenerator); + } + + [Theory] + [InlineData(null)] + [InlineData(ServiceLifetime.Singleton)] + [InlineData(ServiceLifetime.Scoped)] + [InlineData(ServiceLifetime.Transient)] + public void AddVideoGenerator_RegistersExpectedLifetime(ServiceLifetime? lifetime) + { + ServiceCollection sc = new(); + ServiceLifetime expectedLifetime = lifetime ?? ServiceLifetime.Singleton; + VideoGeneratorBuilder builder = lifetime.HasValue + ? sc.AddVideoGenerator(services => new TestVideoGenerator(), lifetime.Value) + : sc.AddVideoGenerator(services => new TestVideoGenerator()); + + ServiceDescriptor sd = Assert.Single(sc); + Assert.Equal(typeof(IVideoGenerator), sd.ServiceType); + Assert.False(sd.IsKeyedService); + Assert.Null(sd.ImplementationInstance); + Assert.NotNull(sd.ImplementationFactory); + Assert.IsType(sd.ImplementationFactory(null!)); + Assert.Equal(expectedLifetime, sd.Lifetime); + } + + [Theory] + [InlineData(null)] + [InlineData(ServiceLifetime.Singleton)] + [InlineData(ServiceLifetime.Scoped)] + [InlineData(ServiceLifetime.Transient)] + public void AddKeyedVideoGenerator_RegistersExpectedLifetime(ServiceLifetime? lifetime) + { + ServiceCollection sc = new(); + ServiceLifetime expectedLifetime = lifetime ?? ServiceLifetime.Singleton; + VideoGeneratorBuilder builder = lifetime.HasValue + ? 
sc.AddKeyedVideoGenerator("key", services => new TestVideoGenerator(), lifetime.Value) + : sc.AddKeyedVideoGenerator("key", services => new TestVideoGenerator()); + + ServiceDescriptor sd = Assert.Single(sc); + Assert.Equal(typeof(IVideoGenerator), sd.ServiceType); + Assert.True(sd.IsKeyedService); + Assert.Equal("key", sd.ServiceKey); + Assert.Null(sd.KeyedImplementationInstance); + Assert.NotNull(sd.KeyedImplementationFactory); + Assert.IsType(sd.KeyedImplementationFactory(null!, null!)); + Assert.Equal(expectedLifetime, sd.Lifetime); + } + + [Fact] + public void AddKeyedVideoGenerator_WorksWithNullServiceKey() + { + ServiceCollection sc = new(); + sc.AddKeyedVideoGenerator(null, _ => new TestVideoGenerator()); + + ServiceDescriptor sd = Assert.Single(sc); + Assert.Equal(typeof(IVideoGenerator), sd.ServiceType); + Assert.False(sd.IsKeyedService); + Assert.Null(sd.ServiceKey); + Assert.Null(sd.ImplementationInstance); + Assert.NotNull(sd.ImplementationFactory); + Assert.IsType(sd.ImplementationFactory(null!)); + Assert.Equal(ServiceLifetime.Singleton, sd.Lifetime); + } + + public class SingletonMiddleware(IVideoGenerator inner, IServiceProvider services) : DelegatingVideoGenerator(inner) + { + public new IVideoGenerator InnerGenerator => base.InnerGenerator; + public IServiceProvider Services => services; + } +} From 96e3aa96d1588138e296a441cff23d37b2bc72f9 Mon Sep 17 00:00:00 2001 From: "Eric St. 
John" Date: Fri, 20 Mar 2026 09:22:58 -0700 Subject: [PATCH 02/10] Address feedback --- .../Video/HostedVideoGenerationTool.cs | 45 -- .../Video/VideoGenerationResponse.cs | 6 + .../Video/VideoGenerationToolResultContent.cs | 37 -- .../OpenAIVideoGenerator.cs | 106 ++-- .../VideoGeneratingChatClient.cs | 506 ------------------ ...eoGeneratingChatClientBuilderExtensions.cs | 47 -- ...ratorBuilderServiceCollectionExtensions.cs | 8 +- 7 files changed, 82 insertions(+), 673 deletions(-) delete mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/HostedVideoGenerationTool.cs delete mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolResultContent.cs delete mode 100644 src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClient.cs delete mode 100644 src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClientBuilderExtensions.cs diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/HostedVideoGenerationTool.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/HostedVideoGenerationTool.cs deleted file mode 100644 index 910814f5c1e..00000000000 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/HostedVideoGenerationTool.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Shared.DiagnosticIds; - -namespace Microsoft.Extensions.AI; - -/// Represents a hosted tool that can be specified to an AI service to enable it to perform video generation. -/// -/// This tool does not itself implement video generation. It is a marker that can be used to inform a service -/// that the service is allowed to perform video generation if the service is capable of doing so. 
-/// -[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] -public class HostedVideoGenerationTool : AITool -{ - /// Any additional properties associated with the tool. - private IReadOnlyDictionary? _additionalProperties; - - /// - /// Initializes a new instance of the class with the specified options. - /// - public HostedVideoGenerationTool() - { - } - - /// Initializes a new instance of the class. - /// Any additional properties associated with the tool. - public HostedVideoGenerationTool(IReadOnlyDictionary? additionalProperties) - { - _additionalProperties = additionalProperties; - } - - /// - public override string Name => "video_generation"; - - /// - public override IReadOnlyDictionary AdditionalProperties => _additionalProperties ?? base.AdditionalProperties; - - /// - /// Gets or sets the options used to configure video generation. - /// - public VideoGenerationOptions? Options { get; set; } -} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs index 471888ad4fb..d3febd2bb4b 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs @@ -50,4 +50,10 @@ public IList Contents /// Gets or sets usage details for the video generation response. public UsageDetails? Usage { get; set; } + + /// Gets or sets the model ID used to generate the video. + public string? ModelId { get; set; } + + /// Gets or sets any additional properties associated with the response. + public AdditionalPropertiesDictionary? 
AdditionalProperties { get; set; } } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolResultContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolResultContent.cs deleted file mode 100644 index 3679d7de174..00000000000 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolResultContent.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Shared.DiagnosticIds; - -namespace Microsoft.Extensions.AI; - -/// -/// Represents a video generation tool call invocation by a hosted service. -/// -/// -/// This content type represents when a hosted AI service invokes a video generation tool. -/// It is informational only and represents the call itself, not the result. -/// -[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] -public sealed class VideoGenerationToolResultContent : ToolResultContent -{ - /// - /// Initializes a new instance of the class. - /// - /// The tool call ID. - public VideoGenerationToolResultContent(string callId) - : base(callId) - { - } - - /// - /// Gets or sets the generated content items. - /// - /// - /// Content is typically for videos streamed from the tool, or for remotely hosted videos, but - /// can also be provider-specific content types that represent the generated videos. - /// - public IList? 
Outputs { get; set; } -} diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs index 9f279f3ec27..90aa8d33130 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs @@ -8,7 +8,6 @@ using System.Diagnostics.CodeAnalysis; using System.Drawing; using System.IO; -using System.Linq; using System.Text; using System.Text.Json; using System.Text.Json.Nodes; @@ -128,19 +127,27 @@ public async Task GenerateAsync( if (editVideoId is null && extendVideoId is null && request.OriginalMedia is { } originalMedia) { - AIContent? firstMedia = originalMedia.FirstOrDefault(); - if (firstMedia is DataContent dc && - IsVideoMediaType(dc.MediaType) && dc.Data.Length > 0) + foreach (AIContent media in originalMedia) { - videoEditContent = dc; - } - else if (firstMedia is UriContent uc) - { - imageReferenceUri = uc; - } - else if (firstMedia is DataContent imgDc && imgDc.Data.Length > 0) - { - imageReferenceData = imgDc; + if (media is DataContent dc && dc.Data.Length > 0) + { + if (IsVideoMediaType(dc.MediaType)) + { + videoEditContent = dc; + } + else if (IsImageMediaType(dc.MediaType)) + { + imageReferenceData = dc; + } + + break; + } + + if (media is UriContent uc && IsImageMediaType(uc.MediaType)) + { + imageReferenceUri = uc; + break; + } } } @@ -159,15 +166,12 @@ public async Task GenerateAsync( if (options?.Duration is TimeSpan extDuration) { -#pragma warning disable LA0002 - body["seconds"] = ((int)extDuration.TotalSeconds) - .ToString(System.Globalization.CultureInfo.InvariantCulture); -#pragma warning restore LA0002 + body["seconds"] = (int)extDuration.TotalSeconds; } ForwardAdditionalProperties(body, options); using BinaryContent extendContent = BinaryContent.Create( - new BinaryData(body.ToJsonString())); + SerializeJsonToUtf8(body)); using PipelineMessage extendMsg = 
CreatePipelineRequest( _videoClient, "/videos/extensions", extendContent, "application/json", reqOpts); @@ -185,7 +189,7 @@ public async Task GenerateAsync( ForwardAdditionalProperties(body, options); using BinaryContent editContent = BinaryContent.Create( - new BinaryData(body.ToJsonString())); + SerializeJsonToUtf8(body)); using PipelineMessage editMsg = CreatePipelineRequest( _videoClient, "/videos/edits", editContent, "application/json", reqOpts); @@ -227,10 +231,7 @@ public async Task GenerateAsync( if (options?.Duration is TimeSpan duration) { -#pragma warning disable LA0002 - requestBody["seconds"] = ((int)duration.TotalSeconds) - .ToString(System.Globalization.CultureInfo.InvariantCulture); -#pragma warning restore LA0002 + requestBody["seconds"] = (int)duration.TotalSeconds; } if (options?.Count is int count && count > 1) @@ -260,7 +261,7 @@ public async Task GenerateAsync( else { using BinaryContent content = BinaryContent.Create( - new BinaryData(requestBody.ToJsonString())); + SerializeJsonToUtf8(requestBody)); createResult = await _videoClient.CreateVideoAsync( content, "application/json", reqOpts).ConfigureAwait(false); } @@ -306,15 +307,28 @@ await Task.Delay( errorMessage ?? "Video generation failed."); } - // Download the completed video content - var dlOpts = new RequestOptions { CancellationToken = cancellationToken }; - ClientResult downloadResult = await _videoClient.DownloadVideoAsync( - videoId, options: dlOpts).ConfigureAwait(false); - BinaryData videoData = downloadResult.GetRawResponse().Content; - + // Honor the requested response format. string contentType = options?.MediaType ?? "video/mp4"; - List contents = - [new DataContent(videoData.ToMemory(), contentType)]; + List contents; + + if (options?.ResponseFormat is VideoGenerationResponseFormat.Uri or + VideoGenerationResponseFormat.Hosted) + { + // Return a URI pointing to the video content endpoint without downloading + // the potentially large video blob. 
+ string baseUrl = _videoClient.Endpoint.ToString().TrimEnd('/'); + var videoUri = new Uri($"{baseUrl}/videos/{videoId}/content"); + contents = [new UriContent(videoUri, contentType)]; + } + else + { + // Download the completed video content. + var dlOpts = new RequestOptions { CancellationToken = cancellationToken }; + ClientResult downloadResult = await _videoClient.DownloadVideoAsync( + videoId, options: dlOpts).ConfigureAwait(false); + BinaryData videoData = downloadResult.GetRawResponse().Content; + contents = [new DataContent(videoData.ToMemory(), contentType)]; + } return new VideoGenerationResponse(contents); } @@ -342,7 +356,7 @@ private static PipelineMessage CreatePipelineRequest( string baseUrl = videoClient.Endpoint.ToString().TrimEnd('/'); Uri uri = new($"{baseUrl}{path}"); PipelineMessageClassifier classifier = PipelineMessageClassifier.Create( - stackalloc ushort[] { 200 }); + stackalloc ushort[] { 200, 201, 202 }); PipelineMessage message = videoClient.Pipeline.CreateMessage( uri, "POST", classifier); message.Request.Headers.Set("Content-Type", contentType); @@ -363,6 +377,12 @@ private static bool IsVideoMediaType(string? mediaType) => mediaType is not null && mediaType.StartsWith("video/", StringComparison.OrdinalIgnoreCase); + /// Determines whether the given media type represents an image format. + /// Treats or unspecified media types as images for backward compatibility. + private static bool IsImageMediaType(string? mediaType) => + mediaType is null || + mediaType.StartsWith("image/", StringComparison.OrdinalIgnoreCase); + /// Determines whether the given key is a routing key consumed by this generator. private static bool IsRoutingKey(string key) => string.Equals(key, EditVideoIdKey, StringComparison.OrdinalIgnoreCase) || @@ -398,6 +418,18 @@ private static bool IsTerminalStatus(string status) => ? val : null; + /// Serializes a to UTF-8 bytes without an intermediate string allocation. 
+ private static BinaryData SerializeJsonToUtf8(JsonObject body) + { + using var ms = new MemoryStream(); + using (var writer = new Utf8JsonWriter(ms)) + { + body.WriteTo(writer); + } + + return new BinaryData(ms.ToArray()); + } + /// Builds a multipart/form-data body containing the form fields and a file part. private static BinaryContent BuildMultipartContent( JsonObject formFields, @@ -417,7 +449,13 @@ private static BinaryContent BuildMultipartContent( continue; } - WriteFormField(ms, boundary, prop.Key, prop.Value.ToString()); + string fieldValue = + prop.Value is JsonValue jsonValue && + jsonValue.TryGetValue(out string? stringValue) + ? stringValue + : prop.Value.ToString(); + + WriteFormField(ms, boundary, prop.Key, fieldValue); } string fileName = fileContent.Name ?? filePartName; diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClient.cs deleted file mode 100644 index 631307582a2..00000000000 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClient.cs +++ /dev/null @@ -1,506 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Shared.DiagnosticIds; -using Microsoft.Shared.Diagnostics; - -namespace Microsoft.Extensions.AI; - -/// A delegating chat client that enables video generation capabilities by converting instances to function tools. -/// -/// -/// The provided implementation of is thread-safe for concurrent use so long as the -/// employed is also thread-safe for concurrent use. 
-/// -/// -/// This client automatically detects instances in the collection -/// and replaces them with equivalent function tools that the chat client can invoke to perform video generation and editing operations. -/// -/// -[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] -public sealed class VideoGeneratingChatClient : DelegatingChatClient -{ - /// - /// Specifies how video and other data content is handled when passing data to an inner client. - /// - /// - /// Use this enumeration to control whether videos in the data content are passed as-is, replaced - /// with unique identifiers, or only generated videos are replaced. This setting affects how downstream clients - /// receive and process video data. - /// Reducing what's passed downstream can help manage the context window. - /// - public enum DataContentHandling - { - /// Pass all DataContent to inner client. - None, - - /// Replace all videos with unique identifiers when passing to inner client. - AllVideos, - - /// Replace only videos that were produced by past video generation requests with unique identifiers when passing to inner client. - GeneratedVideos - } - - private const string VideoKey = "meai_video"; - - private readonly IVideoGenerator _videoGenerator; - private readonly DataContentHandling _dataContentHandling; - - /// Initializes a new instance of the class. - /// The underlying . - /// An instance that will be used for video generation operations. - /// Specifies how to handle instances when passing messages to the inner client. - /// The default is . - /// or is . - public VideoGeneratingChatClient(IChatClient innerClient, IVideoGenerator videoGenerator, DataContentHandling dataContentHandling = DataContentHandling.AllVideos) - : base(innerClient) - { - _videoGenerator = Throw.IfNull(videoGenerator); - _dataContentHandling = dataContentHandling; - } - - /// - public override async Task GetResponseAsync( - IEnumerable messages, ChatOptions? 
options = null, CancellationToken cancellationToken = default) - { - _ = Throw.IfNull(messages); - - var requestState = new RequestState(_videoGenerator, _dataContentHandling); - - // Process the chat options to replace HostedVideoGenerationTool with functions - var processedOptions = requestState.ProcessChatOptions(options); - var processedMessages = requestState.ProcessChatMessages(messages); - - // Get response from base implementation - var response = await base.GetResponseAsync(processedMessages, processedOptions, cancellationToken); - - // Replace FunctionResultContent instances with generated video content - foreach (var message in response.Messages) - { - message.Contents = requestState.ReplaceVideoGenerationFunctionResults(message.Contents); - } - - return response; - } - - /// - public override async IAsyncEnumerable GetStreamingResponseAsync( - IEnumerable messages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - _ = Throw.IfNull(messages); - - var requestState = new RequestState(_videoGenerator, _dataContentHandling); - - // Process the chat options to replace HostedVideoGenerationTool with functions - var processedOptions = requestState.ProcessChatOptions(options); - var processedMessages = requestState.ProcessChatMessages(messages); - - await foreach (var update in base.GetStreamingResponseAsync(processedMessages, processedOptions, cancellationToken)) - { - // Replace any FunctionResultContent instances with generated video content - var newContents = requestState.ReplaceVideoGenerationFunctionResults(update.Contents); - - if (!ReferenceEquals(newContents, update.Contents)) - { - // Create a new update instance with modified contents - var modifiedUpdate = update.Clone(); - modifiedUpdate.Contents = newContents; - yield return modifiedUpdate; - } - else - { - yield return update; - } - } - } - - /// Provides a mechanism for releasing unmanaged resources. 
- /// to dispose managed resources; otherwise, . - protected override void Dispose(bool disposing) - { - if (disposing) - { - _videoGenerator.Dispose(); - } - - base.Dispose(disposing); - } - - /// - /// Contains all the per-request state and methods for handling video generation requests. - /// This class is created fresh for each request to ensure thread safety. - /// This class is not exposed publicly and does not own any of it's resources. - /// - private sealed class RequestState - { - private readonly IVideoGenerator _videoGenerator; - private readonly DataContentHandling _dataContentHandling; - private readonly HashSet _toolNames = new(StringComparer.Ordinal); - private readonly Dictionary> _videoContentByCallId = []; - private readonly Dictionary _videoContentById = new(StringComparer.OrdinalIgnoreCase); - private VideoGenerationOptions? _videoGenerationOptions; - - public RequestState(IVideoGenerator videoGenerator, DataContentHandling dataContentHandling) - { - _videoGenerator = videoGenerator; - _dataContentHandling = dataContentHandling; - } - - /// - /// Processes the chat messages to replace videos in data content with unique identifiers as needed. - /// All videos will be stored for later retrieval during video editing operations. - /// See for details on video replacement behavior. - /// - /// Messages to process. - /// Processed messages, or the original messages if no changes were made. - public IEnumerable ProcessChatMessages(IEnumerable messages) - { - List? newMessages = null; - int messageIndex = 0; - foreach (var message in messages) - { - List? 
newContents = null; - for (int contentIndex = 0; contentIndex < message.Contents.Count; contentIndex++) - { - var content = message.Contents[contentIndex]; - - void ReplaceVideo(string videoId, DataContent dataContent) - { - // Replace video with a placeholder text content, to give an indication to the model of its placement in the context - newContents ??= CopyList(message.Contents, contentIndex); - newContents.Add(new TextContent($"[{VideoKey}:{videoId}] available for edit.") - { - Annotations = dataContent.Annotations, - AdditionalProperties = dataContent.AdditionalProperties - }); - } - - if (content is DataContent dataContent && dataContent.HasTopLevelMediaType("video")) - { - // Store the video to make available for edit - var videoId = StoreVideo(dataContent); - - if (_dataContentHandling == DataContentHandling.AllVideos) - { - ReplaceVideo(videoId, dataContent); - continue; // Skip adding the original content - } - } - else if (content is VideoGenerationToolResultContent toolResultContent) - { - foreach (var output in toolResultContent.Outputs ?? []) - { - if (output is DataContent generatedDataContent && generatedDataContent.HasTopLevelMediaType("video")) - { - // Store the video to make available for edit - var videoId = StoreVideo(generatedDataContent, isGenerated: true); - - if (_dataContentHandling == DataContentHandling.AllVideos || - _dataContentHandling == DataContentHandling.GeneratedVideos) - { - ReplaceVideo(videoId, generatedDataContent); - } - } - } - - if (_dataContentHandling == DataContentHandling.AllVideos || - _dataContentHandling == DataContentHandling.GeneratedVideos) - { - // skip adding the generated content - continue; - } - } - - // Add the original content if no replacement was made - newContents?.Add(content); - } - - if (newContents != null) - { - newMessages ??= [.. 
messages.Take(messageIndex)]; - var newMessage = message.Clone(); - newMessage.Contents = newContents; - newMessages.Add(newMessage); - } - else - { - newMessages?.Add(message); - } - - messageIndex++; - } - - return newMessages ?? messages; - } - - public ChatOptions? ProcessChatOptions(ChatOptions? options) - { - if (options?.Tools is null || options.Tools.Count == 0) - { - return options; - } - - List? newTools = null; - var tools = options.Tools; - for (int i = 0; i < tools.Count; i++) - { - var tool = tools[i]; - - // remove all instances of HostedVideoGenerationTool and store the options from the last one - if (tool is HostedVideoGenerationTool videoGenerationTool) - { - _videoGenerationOptions = videoGenerationTool.Options; - - // for the first video generation tool, clone the options and insert our function tools - // remove any subsequent video generation tools - newTools ??= InitializeTools(tools, i); - } - else - { - newTools?.Add(tool); - } - } - - if (newTools is not null) - { - var newOptions = options.Clone(); - newOptions.Tools = newTools; - return newOptions; - } - - return options; - - List InitializeTools(IList existingTools, int toOffsetExclusive) - { -#if NET - ReadOnlySpan tools = -#else - AITool[] tools = -#endif - [ - AIFunctionFactory.Create(GenerateVideoAsync), - AIFunctionFactory.Create(EditVideoAsync), - AIFunctionFactory.Create(GetVideosForEdit) - ]; - - foreach (var tool in tools) - { - _toolNames.Add(tool.Name); - } - - var result = CopyList(existingTools, toOffsetExclusive, tools.Length); - result.AddRange(tools); - return result; - } - } - - /// - /// Replaces FunctionResultContent instances for video generation functions with actual generated video content. - /// - /// The list of AI content to process. - public IList ReplaceVideoGenerationFunctionResults(IList contents) - { - List? 
newContents = null; - - // Replace FunctionResultContent instances with generated video content - for (int i = contents.Count - 1; i >= 0; i--) - { - var content = contents[i]; - - // We must lookup by name because in the streaming case we have not yet been called to record the CallId. - if (content is FunctionCallContent functionCall && - _toolNames.Contains(functionCall.Name)) - { - // create a new list and omit the FunctionCallContent - newContents ??= CopyList(contents, i); - - if (functionCall.Name != nameof(GetVideosForEdit)) - { - newContents.Add(new VideoGenerationToolCallContent(functionCall.CallId)); - } - } - else if (content is FunctionResultContent functionResult && - _videoContentByCallId.TryGetValue(functionResult.CallId, out var videoContents)) - { - newContents ??= CopyList(contents, i); - - if (videoContents.Any()) - { - // Insert VideoGenerationToolResultContent in its place, do not preserve the FunctionResultContent - newContents.Add(new VideoGenerationToolResultContent(functionResult.CallId) - { - Outputs = videoContents - }); - } - - // Remove the mapping as it's no longer needed - _ = _videoContentByCallId.Remove(functionResult.CallId); - } - else - { - // keep the existing content if we have a new list - newContents?.Add(content); - } - } - - return newContents ?? contents; - } - - [Description("Generates videos based on a text description.")] - public async Task GenerateVideoAsync( - [Description("A detailed description of the video to generate")] string prompt, - CancellationToken cancellationToken = default) - { - // Get the call ID from the current function invocation context - var callId = FunctionInvokingChatClient.CurrentContext?.CallContent.CallId; - if (callId == null) - { - return "No call ID available for video generation."; - } - - var request = new VideoGenerationRequest(prompt); - var options = _videoGenerationOptions ?? 
new VideoGenerationOptions(); - options.Count ??= 1; - - var response = await _videoGenerator.GenerateAsync(request, options, cancellationToken: cancellationToken); - - if (response.Contents.Count == 0) - { - return "No video was generated."; - } - - List videoIds = []; - List videoContents = _videoContentByCallId[callId] = []; - foreach (var content in response.Contents) - { - if (content is DataContent videoContent && videoContent.MediaType.StartsWith("video/", StringComparison.OrdinalIgnoreCase)) - { - videoContents.Add(videoContent); - videoIds.Add(StoreVideo(videoContent, true)); - } - } - - return "Generated video successfully."; - } - - [Description("Lists the identifiers of all videos available for edit.")] - public IEnumerable GetVideosForEdit() - { - // Get the call ID from the current function invocation context - var callId = FunctionInvokingChatClient.CurrentContext?.CallContent.CallId; - if (callId == null) - { - return ["No call ID available for video editing."]; - } - - _videoContentByCallId[callId] = []; - - return _videoContentById.Keys.AsEnumerable(); - } - - [Description("Edits an existing video based on a text description.")] - public async Task EditVideoAsync( - [Description("A detailed description of the video to generate")] string prompt, - [Description($"The video to edit from one of the available video identifiers returned by {nameof(GetVideosForEdit)}")] string videoId, - CancellationToken cancellationToken = default) - { - // Get the call ID from the current function invocation context - var callId = FunctionInvokingChatClient.CurrentContext?.CallContent.CallId; - if (callId == null) - { - return "No call ID available for video editing."; - } - - if (string.IsNullOrEmpty(videoId)) - { - return "No videoId provided"; - } - - try - { - var originalVideo = RetrieveVideoContent(videoId); - if (originalVideo == null) - { - return $"No video found with: {videoId}"; - } - - var request = new VideoGenerationRequest(prompt, [originalVideo]); - 
var response = await _videoGenerator.GenerateAsync(request, _videoGenerationOptions, cancellationToken: cancellationToken); - - if (response.Contents.Count == 0) - { - return "No edited video was generated."; - } - - List videoIds = []; - List videoContents = _videoContentByCallId[callId] = []; - foreach (var content in response.Contents) - { - if (content is DataContent videoContent && videoContent.MediaType.StartsWith("video/", StringComparison.OrdinalIgnoreCase)) - { - videoContents.Add(videoContent); - videoIds.Add(StoreVideo(videoContent, true)); - } - } - - return "Edited video successfully."; - } - catch (FormatException) - { - return "Invalid video data format. Please provide a valid base64-encoded video."; - } - } - - private static List CopyList(IList original, int toOffsetExclusive, int additionalCapacity = 0) - { - var newList = new List(original.Count + additionalCapacity); - - // Copy all items up to and excluding the current index - for (int j = 0; j < toOffsetExclusive; j++) - { - newList.Add(original[j]); - } - - return newList; - } - - private DataContent? RetrieveVideoContent(string videoId) - { - if (_videoContentById.TryGetValue(videoId, out var videoContent)) - { - return videoContent as DataContent; - } - - return null; - } - - private string StoreVideo(DataContent videoContent, bool isGenerated = false) - { - // Generate a unique ID for the video if it doesn't have one - string? videoId = null; - if (videoContent.AdditionalProperties?.TryGetValue(VideoKey, out videoId) is false || videoId is null) - { - videoId = videoContent.Name ?? 
Guid.NewGuid().ToString(); - } - - if (isGenerated) - { - videoContent.AdditionalProperties ??= []; - videoContent.AdditionalProperties[VideoKey] = videoId; - } - - // Store the video content for later retrieval - _videoContentById[videoId] = videoContent; - - return videoId; - } - } -} diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClientBuilderExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClientBuilderExtensions.cs deleted file mode 100644 index 1fe30653e58..00000000000 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/VideoGeneratingChatClientBuilderExtensions.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Shared.DiagnosticIds; -using Microsoft.Shared.Diagnostics; - -namespace Microsoft.Extensions.AI; - -/// Provides extensions for configuring instances. -[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] -public static class VideoGeneratingChatClientBuilderExtensions -{ - /// Adds video generation capabilities to the chat client pipeline. - /// The . - /// - /// An optional used for video generation operations. - /// If not supplied, a required instance will be resolved from the service provider. - /// - /// An optional callback that can be used to configure the instance. - /// The . - /// is . - /// - /// - /// This method enables the chat client to handle instances by converting them - /// into function tools that can be invoked by the underlying chat model to perform video generation and editing operations. - /// - /// - public static ChatClientBuilder UseVideoGeneration( - this ChatClientBuilder builder, - IVideoGenerator? videoGenerator = null, - Action? 
configure = null) - { - _ = Throw.IfNull(builder); - - return builder.Use((innerClient, services) => - { - videoGenerator ??= services.GetRequiredService(); - - var chatClient = new VideoGeneratingChatClient(innerClient, videoGenerator); - configure?.Invoke(chatClient); - return chatClient; - }); - } -} diff --git a/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderServiceCollectionExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderServiceCollectionExtensions.cs index 21a3b41f452..202d71cadb1 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderServiceCollectionExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderServiceCollectionExtensions.cs @@ -52,8 +52,8 @@ public static VideoGeneratorBuilder AddVideoGenerator( /// The inner that represents the underlying backend. /// The service lifetime for the generator. Defaults to . /// An that can be used to build a pipeline around the inner generator. - /// , , or is . - /// The generator is registered as a scoped service. + /// or is . + /// The generator is registered with the specified . public static VideoGeneratorBuilder AddKeyedVideoGenerator( this IServiceCollection serviceCollection, object? serviceKey, @@ -67,8 +67,8 @@ public static VideoGeneratorBuilder AddKeyedVideoGenerator( /// A callback that produces the inner that represents the underlying backend. /// The service lifetime for the generator. Defaults to . /// An that can be used to build a pipeline around the inner generator. - /// , , or is . - /// The generator is registered as a scoped service. + /// or is . + /// The generator is registered with the specified . public static VideoGeneratorBuilder AddKeyedVideoGenerator( this IServiceCollection serviceCollection, object? serviceKey, From f2189b78976919484c5da898599797552389aa69 Mon Sep 17 00:00:00 2001 From: "Eric St. 
John" Date: Fri, 20 Mar 2026 17:07:17 -0700 Subject: [PATCH 03/10] Address feedback and refactor to return an Operation rather than Response object. --- samples/VideoGenerationPOC/Program.cs | 47 ++-- .../Video/DelegatingVideoGenerator.cs | 6 +- .../Video/IVideoGenerator.cs | 10 +- .../Video/VideoGenerationOperation.cs | 115 +++++++++ .../Video/VideoGenerationRequest.cs | 33 ++- .../Video/VideoGenerationResponse.cs | 59 ----- .../Video/VideoGenerationToolCallContent.cs | 23 -- .../Video/VideoGeneratorExtensions.cs | 51 ++-- .../Video/VideoOperationKind.cs | 31 +++ .../OpenAIClientExtensions.cs | 73 ++++-- .../OpenAIVideoGenerationOperation.cs | 149 ++++++++++++ .../OpenAIVideoGenerator.cs | 223 +++++++----------- .../OpenTelemetryVideoGenerator.cs | 29 +-- .../Video/ConfigureOptionsVideoGenerator.cs | 6 +- .../Video/LoggingVideoGenerator.cs | 17 +- .../TestVideoGenerationOperation.cs | 51 ++++ .../TestVideoGenerator.cs | 7 +- .../Video/DelegatingVideoGeneratorTests.cs | 6 +- .../Video/VideoGenerationResponseTests.cs | 79 ------- .../Video/VideoGeneratorExtensionsTests.cs | 19 +- .../Video/VideoGeneratorTests.cs | 14 +- .../VideoGeneratorIntegrationTests.cs | 29 ++- .../Microsoft.Extensions.AI.Tests.csproj | 1 + .../ConfigureOptionsVideoGeneratorTests.cs | 4 +- .../Video/LoggingVideoGeneratorTests.cs | 4 +- .../Video/OpenTelemetryVideoGeneratorTests.cs | 36 +-- 26 files changed, 638 insertions(+), 484 deletions(-) create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOperation.cs delete mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs delete mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolCallContent.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoOperationKind.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerationOperation.cs create mode 100644 
test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerationOperation.cs delete mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationResponseTests.cs diff --git a/samples/VideoGenerationPOC/Program.cs b/samples/VideoGenerationPOC/Program.cs index 944cea59bde..f0c6f1e2023 100644 --- a/samples/VideoGenerationPOC/Program.cs +++ b/samples/VideoGenerationPOC/Program.cs @@ -144,18 +144,6 @@ ResponseFormat = VideoGenerationResponseFormat.Data, }; - if (editVideoId is not null) - { - generateOptions.AdditionalProperties ??= []; - generateOptions.AdditionalProperties["edit_video_id"] = editVideoId; - } - - if (extendVideoId is not null) - { - generateOptions.AdditionalProperties ??= []; - generateOptions.AdditionalProperties["extend_video_id"] = extendVideoId; - } - if (characterIds.Length > 0) { var chars = new JsonArray(); @@ -168,9 +156,25 @@ generateOptions.AdditionalProperties["characters"] = chars; } - var response = await generator.GenerateAsync( - new VideoGenerationRequest(prompt, originalMedia), - generateOptions, + var request = new VideoGenerationRequest(prompt, originalMedia); + + if (editVideoId is not null) + { + request.OperationKind = VideoOperationKind.Edit; + request.SourceVideoId = editVideoId; + } + else if (extendVideoId is not null) + { + request.OperationKind = VideoOperationKind.Extend; + request.SourceVideoId = extendVideoId; + } + + var operation = await generator.GenerateAsync(request, generateOptions); + + Console.WriteLine($" Operation ID: {operation.OperationId}"); + Console.WriteLine($" Initial status: {operation.Status}"); + + await operation.WaitForCompletionAsync( new Progress(p => Console.WriteLine($" Status: {p.Status}{(p.PercentComplete.HasValue ? 
$" ({p.PercentComplete}%)" : string.Empty)}"))); @@ -178,20 +182,21 @@ Console.WriteLine($"Completed in {stopwatch.Elapsed.TotalSeconds:F1}s"); Console.WriteLine(); - // --- Process response --- - if (response.Usage is { } usage) + // --- Download and process contents --- + if (operation.Usage is { } usage) { Console.WriteLine($"Token Usage: input={usage.InputTokenCount}, output={usage.OutputTokenCount}, total={usage.TotalTokenCount}"); } - Console.WriteLine($"Generated {response.Contents.Count} content item(s):"); - for (int i = 0; i < response.Contents.Count; i++) + var contents = await operation.GetContentsAsync(generateOptions); + Console.WriteLine($"Generated {contents.Count} content item(s):"); + for (int i = 0; i < contents.Count; i++) { - var content = response.Contents[i]; + var content = contents[i]; switch (content) { case DataContent dc: - string filePath = response.Contents.Count == 1 + string filePath = contents.Count == 1 ? outputPath : Path.Combine( Path.GetDirectoryName(outputPath) ?? ".", diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/DelegatingVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/DelegatingVideoGenerator.cs index 7dbd8330cfc..725de70a076 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/DelegatingVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/DelegatingVideoGenerator.cs @@ -41,10 +41,10 @@ public void Dispose() protected IVideoGenerator InnerGenerator { get; } /// - public virtual Task GenerateAsync( - VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress? progress = null, CancellationToken cancellationToken = default) + public virtual Task GenerateAsync( + VideoGenerationRequest request, VideoGenerationOptions? 
options = null, CancellationToken cancellationToken = default) { - return InnerGenerator.GenerateAsync(request, options, progress, cancellationToken); + return InnerGenerator.GenerateAsync(request, options, cancellationToken); } /// diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/IVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/IVideoGenerator.cs index a1b146316e7..5502508d912 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/IVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/IVideoGenerator.cs @@ -16,15 +16,15 @@ namespace Microsoft.Extensions.AI; public interface IVideoGenerator : IDisposable { /// - /// Sends a video generation request and returns the generated video as a . + /// Submits a video generation request and returns a that can be used to + /// monitor progress, wait for completion, and download the generated content. /// - /// The video generation request containing the prompt and optional original videos for editing. + /// The video generation request containing the prompt and optional media inputs. /// The video generation options to configure the request. - /// An optional to receive progress updates during the generation process. /// The to monitor for cancellation requests. The default is . /// is . - /// The videos generated by the . - Task GenerateAsync(VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress? progress = null, CancellationToken cancellationToken = default); + /// A representing the submitted video generation job. + Task GenerateAsync(VideoGenerationRequest request, VideoGenerationOptions? options = null, CancellationToken cancellationToken = default); /// Asks the for an object of the specified type . /// The type of object being requested. 
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOperation.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOperation.cs new file mode 100644 index 00000000000..3c3623d29d9 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOperation.cs @@ -0,0 +1,115 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// +/// Represents an in-flight or completed video generation operation. +/// +/// +/// +/// When is called, the provider submits a video generation +/// job and returns a immediately. The caller can then: +/// +/// +/// Check and for the current state. +/// Call to poll for updated status. +/// Call to poll until the operation reaches a terminal state. +/// Call to download the generated video content. +/// Call or to derive +/// follow-up requests from a completed video. +/// +/// +/// Providers implement this abstract class to supply their own polling, download, and derived-request logic. +/// Provider-specific operations (e.g., character upload) can be exposed as additional public methods on +/// the concrete subclass. +/// +/// +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public abstract class VideoGenerationOperation +{ + /// Gets the provider-specific identifier for this operation. + public abstract string? OperationId { get; } + + /// Gets the current status of the operation (e.g., "queued", "in_progress", "completed", "failed"). + public abstract string? Status { get; } + + /// Gets the completion percentage (0–100), or if not available. 
+ public abstract int? PercentComplete { get; } + + /// Gets a value indicating whether the operation has reached a terminal state. + public abstract bool IsCompleted { get; } + + /// Gets the failure reason if the operation failed, or . + public abstract string? FailureReason { get; } + + /// Gets or sets the model ID used for the operation. + public string? ModelId { get; set; } + + /// Gets or sets usage details for the video generation operation. + public UsageDetails? Usage { get; set; } + + /// Gets or sets the raw representation of the operation from an underlying implementation. + [JsonIgnore] + public object? RawRepresentation { get; set; } + + /// Gets or sets any additional properties associated with the operation. + public AdditionalPropertiesDictionary? AdditionalProperties { get; set; } + + /// Polls the provider for the current status of this operation. + /// The to monitor for cancellation requests. + /// A task that completes when the status has been refreshed. + public abstract Task UpdateAsync(CancellationToken cancellationToken = default); + + /// Polls the provider until the operation reaches a terminal state. + /// An optional to receive progress updates during waiting. + /// The to monitor for cancellation requests. + /// A task that completes when the operation has finished. + /// The operation failed. + public abstract Task WaitForCompletionAsync( + IProgress? progress = null, + CancellationToken cancellationToken = default); + + /// Downloads the completed video content. + /// Optional options that may influence the download (e.g., ). + /// The to monitor for cancellation requests. + /// The generated video content items. + /// The operation has not completed successfully. + public abstract Task> GetContentsAsync( + VideoGenerationOptions? options = null, + CancellationToken cancellationToken = default); + + /// Creates a to edit this completed video. + /// The prompt describing the desired edits. + /// A configured for editing. 
+ public virtual VideoGenerationRequest CreateEditRequest(string prompt) + { + return new VideoGenerationRequest + { + Prompt = prompt, + SourceVideoId = OperationId, + OperationKind = VideoOperationKind.Edit, + }; + } + + /// Creates a to extend this completed video. + /// An optional prompt to guide the extension. + /// A configured for extension. + public virtual VideoGenerationRequest CreateExtensionRequest(string? prompt = null) + { + return new VideoGenerationRequest + { + Prompt = prompt, + SourceVideoId = OperationId, + OperationKind = VideoOperationKind.Extend, + }; + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs index d5ae3f4ff02..998d12cb59e 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs @@ -35,13 +35,29 @@ public VideoGenerationRequest(string prompt, IEnumerable? originalMed /// Gets or sets the prompt to guide the video generation. public string? Prompt { get; set; } + /// Gets or sets the kind of video operation to perform. + /// + /// Defaults to . Set to or + /// when working with an existing video referenced by + /// or uploaded via . + /// + public VideoOperationKind OperationKind { get; set; } + + /// Gets or sets the provider-specific ID of an existing video to edit or extend. + /// + /// This is typically the of a previously completed + /// video generation. Use or + /// to create a request with this property set. + /// + public string? SourceVideoId { get; set; } + /// /// Gets or sets the original media (images or videos) to use as input for the video generation. /// /// /// - /// The interpretation of this property depends on the content type of the media and the capabilities - /// of the underlying provider. 
Common behaviors include: + /// The interpretation of this property depends on the content type of the media, the , + /// and the capabilities of the underlying provider. Common behaviors include: /// /// /// @@ -49,18 +65,15 @@ public VideoGenerationRequest(string prompt, IEnumerable? originalMed /// generation. The provider creates a video inspired by or based on the image. Supported by most providers. /// /// - /// Video content (e.g., video/mp4): Used as a source video for editing or remixing. The provider - /// modifies the existing video according to the . Not all providers support video editing. + /// Video content (e.g., video/mp4): Used as a source video for editing when + /// is . The provider modifies the + /// existing video according to the . /// /// /// /// If this property is or empty, the request is treated as a text-to-video generation - /// using only the . - /// - /// - /// Provider-specific scenarios such as video continuations or character consistency can be controlled - /// via . Refer to the provider documentation - /// for supported keys. + /// using only the . To edit or extend a previously generated video by ID rather than by + /// uploading media, set and the appropriate . /// /// public IEnumerable? OriginalMedia { get; set; } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs deleted file mode 100644 index d3febd2bb4b..00000000000 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationResponse.cs +++ /dev/null @@ -1,59 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. 
- -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; -using Microsoft.Shared.DiagnosticIds; - -namespace Microsoft.Extensions.AI; - -/// Represents the result of a video generation request. -[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] -public class VideoGenerationResponse -{ - /// Initializes a new instance of the class. - [JsonConstructor] - public VideoGenerationResponse() - { - } - - /// Initializes a new instance of the class. - /// The contents for this response. - public VideoGenerationResponse(IList? contents) - { - Contents = contents; - } - - /// Gets or sets the raw representation of the video generation response from an underlying implementation. - /// - /// If a is created to represent some underlying object from another object - /// model, this property can be used to store that original object. This can be useful for debugging or - /// for enabling a consumer to access the underlying object model if needed. - /// - [JsonIgnore] - public object? RawRepresentation { get; set; } - - /// - /// Gets or sets the generated content items. - /// - /// - /// Content is typically for videos streamed from the generator, or for remotely hosted videos, but - /// can also be provider-specific content types that represent the generated videos. - /// - [AllowNull] - public IList Contents - { - get => field ??= []; - set; - } - - /// Gets or sets usage details for the video generation response. - public UsageDetails? Usage { get; set; } - - /// Gets or sets the model ID used to generate the video. - public string? ModelId { get; set; } - - /// Gets or sets any additional properties associated with the response. - public AdditionalPropertiesDictionary? 
AdditionalProperties { get; set; } -} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolCallContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolCallContent.cs deleted file mode 100644 index 36b0fdd4f04..00000000000 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationToolCallContent.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.Shared.DiagnosticIds; - -namespace Microsoft.Extensions.AI; - -/// -/// Represents the invocation of a video generation tool call by a hosted service. -/// -[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] -public sealed class VideoGenerationToolCallContent : ToolCallContent -{ - /// - /// Initializes a new instance of the class. - /// - /// The tool call ID. - public VideoGenerationToolCallContent(string callId) - : base(callId) - { - } -} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs index bf310f75424..e29fb8c3eb6 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs @@ -90,107 +90,104 @@ public static TService GetRequiredService(this IVideoGenerator generat /// The video generator. /// The prompt to guide the video generation. /// The video generation options to configure the request. - /// An optional to receive progress updates during the generation process. /// The to monitor for cancellation requests. The default is . /// or is . - /// The videos generated by the generator. 
- public static Task GenerateVideosAsync( + /// A representing the submitted video generation job. + public static Task GenerateVideoAsync( this IVideoGenerator generator, string prompt, VideoGenerationOptions? options = null, - IProgress? progress = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(generator); _ = Throw.IfNull(prompt); - return generator.GenerateAsync(new VideoGenerationRequest(prompt), options, progress, cancellationToken); + return generator.GenerateAsync(new VideoGenerationRequest { Prompt = prompt }, options, cancellationToken); } /// - /// Generates or edits videos using original media and a text prompt. + /// Submits an edit request for original media using the specified prompt. /// /// The video generator. /// The original media (images or videos) to use as input. /// The prompt to guide the video generation or editing. /// The video generation options to configure the request. - /// An optional to receive progress updates during the generation process. /// The to monitor for cancellation requests. The default is . /// , , or is . - /// The videos generated by the generator. - public static Task EditVideosAsync( + /// A representing the submitted video generation job. + public static Task EditVideoAsync( this IVideoGenerator generator, IEnumerable originalMedia, string prompt, VideoGenerationOptions? options = null, - IProgress? progress = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(generator); _ = Throw.IfNull(originalMedia); _ = Throw.IfNull(prompt); - return generator.GenerateAsync(new VideoGenerationRequest(prompt, originalMedia), options, progress, cancellationToken); + return generator.GenerateAsync( + new VideoGenerationRequest { Prompt = prompt, OriginalMedia = originalMedia, OperationKind = VideoOperationKind.Edit }, + options, cancellationToken); } /// - /// Generates or edits a video using the original video and the specified prompt. 
+ /// Submits an edit request for a single video using the specified prompt. /// /// The video generator. /// The single video to use as input. - /// The prompt to guide the video generation or editing. + /// The prompt to guide the video editing. /// The video generation options to configure the request. - /// An optional to receive progress updates during the generation process. /// The to monitor for cancellation requests. The default is . /// , , or is . - /// The videos generated by the generator. - public static Task EditVideoAsync( + /// A representing the submitted video generation job. + public static Task EditVideoAsync( this IVideoGenerator generator, DataContent originalVideo, string prompt, VideoGenerationOptions? options = null, - IProgress? progress = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(generator); _ = Throw.IfNull(originalVideo); _ = Throw.IfNull(prompt); - return generator.GenerateAsync(new VideoGenerationRequest(prompt, [originalVideo]), options, progress, cancellationToken); + return generator.GenerateAsync( + new VideoGenerationRequest { Prompt = prompt, OriginalMedia = [originalVideo], OperationKind = VideoOperationKind.Edit }, + options, cancellationToken); } /// - /// Generates or edits a video using video data provided as a byte array and the specified prompt. + /// Submits an edit request for video data provided as a byte array. /// /// The video generator. /// The byte array containing the video data to use as input. /// The filename for the video data. /// The prompt to guide the video generation. /// The video generation options to configure the request. - /// An optional to receive progress updates during the generation process. /// The to monitor for cancellation requests. The default is . /// /// , , or is . /// - /// The videos generated by the generator. - public static Task EditVideoAsync( + /// A representing the submitted video generation job. 
+ public static Task EditVideoAsync( this IVideoGenerator generator, ReadOnlyMemory originalVideoData, string fileName, string prompt, VideoGenerationOptions? options = null, - IProgress? progress = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(generator); _ = Throw.IfNull(fileName); _ = Throw.IfNull(prompt); - // Infer media type from file extension string mediaType = GetMediaTypeFromFileName(fileName); - var dataContent = new DataContent(originalVideoData, mediaType) { Name = fileName }; - return generator.GenerateAsync(new VideoGenerationRequest(prompt, [dataContent]), options, progress, cancellationToken); + + return generator.GenerateAsync( + new VideoGenerationRequest { Prompt = prompt, OriginalMedia = [dataContent], OperationKind = VideoOperationKind.Edit }, + options, cancellationToken); } /// @@ -200,6 +197,6 @@ public static Task EditVideoAsync( /// The inferred media type. private static string GetMediaTypeFromFileName(string fileName) { - return MediaTypeMap.GetMediaType(fileName) ?? "video/mp4"; // Default to MP4 if unknown extension + return MediaTypeMap.GetMediaType(fileName) ?? "video/mp4"; } } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoOperationKind.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoOperationKind.cs new file mode 100644 index 00000000000..8ff0dab943e --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoOperationKind.cs @@ -0,0 +1,31 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Shared.DiagnosticIds; + +namespace Microsoft.Extensions.AI; + +/// +/// Specifies the kind of video generation operation to perform. 
+/// +[Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] +public enum VideoOperationKind +{ + /// + /// Create a new video from a text prompt, optionally guided by a reference image + /// supplied via . + /// + Create, + + /// + /// Edit an existing video identified by + /// or uploaded via . + /// + Edit, + + /// + /// Extend an existing video identified by . + /// + Extend, +} diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs index 28ab454ef73..52f183514bc 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs @@ -11,7 +11,10 @@ using System.Text.Json; using System.Text.Json.Nodes; using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; using Microsoft.Shared.DiagnosticIds; +using Microsoft.Shared.Diagnostics; using OpenAI; using OpenAI.Assistants; using OpenAI.Audio; @@ -192,46 +195,43 @@ public static IImageGenerator AsIImageGenerator(this ImageClient imageClient) => /// is . /// /// - /// The returned supports the following scenarios based on the - /// request contents and keys: + /// The returned submits video generation jobs and returns + /// instances. The endpoint is chosen based on the + /// and : /// /// /// - /// Text-to-video: When is - /// and no routing keys are set, generates a new video from the - /// text prompt via POST /videos. + /// Text-to-video (): Generates a new video from + /// the text prompt via POST /videos. /// /// - /// Image-to-video: When - /// contains image content (e.g., image/png), uses the image as an - /// input_reference to guide new video creation via POST /videos. - /// A sends the image URL in the JSON body; - /// a uploads the image bytes via multipart/form-data. 
+ /// Image-to-video (): When + /// contains image content, uses it as an + /// input_reference via POST /videos. /// /// - /// Edit by video ID: Set edit_video_id in - /// to the ID of a previously - /// generated video. The request is routed to POST /videos/edits. + /// Edit by video ID (): When + /// is set, edits the video via + /// POST /videos/edits. /// /// - /// Edit by upload: When - /// contains video content (e.g., video/mp4), uploads the video for editing - /// via POST /videos/edits with multipart/form-data. + /// Edit by upload (): When + /// contains video content and no + /// is set, uploads the video for editing. /// /// - /// Extend: Set extend_video_id in - /// to the ID of a completed - /// video. The request is routed to POST /videos/extensions. + /// Extend (): When + /// is set, extends the video via + /// POST /videos/extensions. /// /// /// /// Character IDs can be included in the create request by passing a characters key - /// in as a JSON array (e.g., - /// [{ "id": "char_abc123" }]). Characters are reusable visual assets created - /// separately via POST /videos/characters. + /// in as a JSON array. Characters + /// can also be uploaded via . /// /// - /// Any other keys in are forwarded + /// Any keys in are forwarded /// as-is to the OpenAI API request body. /// /// @@ -239,6 +239,33 @@ public static IImageGenerator AsIImageGenerator(this ImageClient imageClient) => public static IVideoGenerator AsIVideoGenerator(this VideoClient videoClient, string? modelId = null) => new OpenAIVideoGenerator(videoClient, modelId); + /// + /// Uploads a character asset from a video for use in subsequent video generation requests. + /// + /// The video generator backed by an OpenAI . + /// The name of the character. + /// The video content containing the character. + /// The to monitor for cancellation requests. + /// The provider-specific character ID that can be passed in + /// under the "characters" key. + /// The is not + /// backed by an OpenAI . 
+ [Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] + public static Task UploadVideoCharacterAsync( + this IVideoGenerator generator, + string name, + DataContent videoContent, + CancellationToken cancellationToken = default) + { + _ = Throw.IfNull(generator); + _ = Throw.IfNull(videoContent); + + OpenAIVideoGenerator openAIGenerator = generator as OpenAIVideoGenerator + ?? throw new InvalidOperationException("The video generator is not backed by an OpenAI VideoClient."); + + return openAIGenerator.UploadVideoCharacterAsync(name, videoContent, cancellationToken); + } + /// Gets an for use with this . /// The client. /// The number of dimensions to generate in each embedding. diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerationOperation.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerationOperation.cs new file mode 100644 index 00000000000..7aab67537ed --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerationOperation.cs @@ -0,0 +1,149 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Shared.DiagnosticIds; +using OpenAI.Videos; + +namespace Microsoft.Extensions.AI; + +/// +/// Represents an OpenAI video generation operation returned by . +/// +/// +/// Use to upload character assets +/// that can be referenced in subsequent video generation requests. +/// +[Experimental(DiagnosticIds.Experiments.AIOpenAIVideoClient)] +public sealed class OpenAIVideoGenerationOperation : VideoGenerationOperation +{ + /// Default polling interval for checking video generation status. 
+ private static readonly TimeSpan _defaultPollingInterval = TimeSpan.FromSeconds(10); + + private readonly VideoClient _videoClient; + private string? _operationId; + private string? _status; + private int? _percentComplete; + private string? _failureReason; + + /// Initializes a new instance of the class. + internal OpenAIVideoGenerationOperation(VideoClient videoClient, string operationId, string status, int? percentComplete) + { + _videoClient = videoClient; + _operationId = operationId; + _status = status; + _percentComplete = percentComplete; + } + + /// + public override string? OperationId => _operationId; + + /// + public override string? Status => _status; + + /// + public override int? PercentComplete => _percentComplete; + + /// + public override bool IsCompleted => IsTerminalStatus(_status); + + /// + public override string? FailureReason => _failureReason; + + /// + public override async Task UpdateAsync(CancellationToken cancellationToken = default) + { + if (_operationId is null || IsCompleted) + { + return; + } + + var opts = new RequestOptions { CancellationToken = cancellationToken }; + ClientResult result = await _videoClient.GetVideoAsync(_operationId, opts).ConfigureAwait(false); + ParseStatus(result.GetRawResponse().Content); + } + + /// + public override async Task WaitForCompletionAsync( + IProgress? progress = null, + CancellationToken cancellationToken = default) + { + progress?.Report(new VideoGenerationProgress(_status, _percentComplete)); + + while (!IsCompleted) + { + await Task.Delay(_defaultPollingInterval, cancellationToken).ConfigureAwait(false); + await UpdateAsync(cancellationToken).ConfigureAwait(false); + progress?.Report(new VideoGenerationProgress(_status, _percentComplete)); + } + + if (string.Equals(_status, "failed", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException(_failureReason ?? 
"Video generation failed."); + } + } + + /// + public override async Task> GetContentsAsync( + VideoGenerationOptions? options = null, + CancellationToken cancellationToken = default) + { + if (!IsCompleted) + { + throw new InvalidOperationException("The operation has not completed. Call WaitForCompletionAsync first."); + } + + if (string.Equals(_status, "failed", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException(_failureReason ?? "Video generation failed."); + } + + string contentType = options?.MediaType ?? "video/mp4"; + + if (options?.ResponseFormat is VideoGenerationResponseFormat.Uri or + VideoGenerationResponseFormat.Hosted) + { + string baseUrl = _videoClient.Endpoint.ToString().TrimEnd('/'); + var videoUri = new Uri($"{baseUrl}/videos/{_operationId}/content"); + return [new UriContent(videoUri, contentType)]; + } + + var dlOpts = new RequestOptions { CancellationToken = cancellationToken }; + ClientResult downloadResult = await _videoClient.DownloadVideoAsync( + _operationId!, options: dlOpts).ConfigureAwait(false); + BinaryData videoData = downloadResult.GetRawResponse().Content; + return [new DataContent(videoData.ToMemory(), contentType)]; + } + + private static bool IsTerminalStatus(string? status) => + string.Equals(status, "completed", StringComparison.OrdinalIgnoreCase) || + string.Equals(status, "failed", StringComparison.OrdinalIgnoreCase) || + string.Equals(status, "expired", StringComparison.OrdinalIgnoreCase); + + /// Parses status fields from a video job JSON response. 
+ private void ParseStatus(BinaryData content) + { + using JsonDocument doc = JsonDocument.Parse(content); + _status = doc.RootElement.GetProperty("status").GetString(); + + if (doc.RootElement.TryGetProperty("progress", out JsonElement progressEl) && + progressEl.TryGetInt32(out int pct)) + { + _percentComplete = pct; + } + + if (string.Equals(_status, "failed", StringComparison.OrdinalIgnoreCase) && + doc.RootElement.TryGetProperty("error", out JsonElement errorEl) && + errorEl.TryGetProperty("message", out JsonElement msgEl)) + { + _failureReason = msgEl.GetString(); + } + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs index 90aa8d33130..b790b0cd874 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs @@ -23,63 +23,52 @@ namespace Microsoft.Extensions.AI; /// /// /// This implementation uses the OpenAI video generation API. Video generation is asynchronous: -/// a generation job is created, polled for completion, and then the video content is downloaded. +/// submits a generation job and returns an +/// that can be used to poll for completion and download the result. /// -/// The operation chosen depends on the request contents and options: +/// The endpoint chosen depends on +/// and : /// /// -/// Text-to-video: When is -/// and no routing keys are set, creates a new video from the -/// text prompt via POST /videos. +/// Text-to-video (): When +/// is , +/// creates a new video from the text prompt via POST /videos. /// /// -/// Image-to-video: When -/// contains image content (e.g., image/png), uses the image as an -/// input_reference to guide new video creation via POST /videos. -/// A sends the image URL in JSON; a -/// uploads the image bytes via multipart/form-data. 
+/// Image-to-video (): When +/// contains image content +/// (e.g., image/png), uses the image as an input_reference to guide +/// new video creation via POST /videos. A sends the +/// image URL in JSON; a uploads the image bytes via +/// multipart/form-data. /// /// -/// Edit by video ID: When edit_video_id is set in -/// , edits the specified +/// Edit by video ID (): When +/// is set, edits the specified /// video via POST /videos/edits. /// /// -/// Edit by upload: When -/// contains video content (e.g., video/mp4), uploads the video for editing -/// via POST /videos/edits with multipart/form-data. +/// Edit by upload (): When +/// contains video content +/// (e.g., video/mp4) and no +/// is set, uploads the video for editing via POST /videos/edits with multipart/form-data. /// /// -/// Extend: When extend_video_id is set in -/// , extends the completed +/// Extend (): When +/// is set, extends the completed /// video via POST /videos/extensions. /// /// /// /// Character IDs can be included in the create request by passing a characters /// key in as a JSON array. -/// Characters are reusable visual assets created separately via -/// POST /videos/characters. +/// Characters are reusable visual assets that can be uploaded via +/// . /// /// [Experimental(DiagnosticIds.Experiments.AIOpenAIVideoClient)] internal sealed class OpenAIVideoGenerator : IVideoGenerator { - /// Default polling interval for checking video generation status. - private static readonly TimeSpan _defaultPollingInterval = TimeSpan.FromSeconds(10); - - /// - /// Well-known key that routes the - /// request to POST /videos/edits. The value should be the video ID to edit. - /// - internal const string EditVideoIdKey = "edit_video_id"; - - /// - /// Well-known key that routes the - /// request to POST /videos/extensions. The value should be the completed video ID to extend. - /// - internal const string ExtendVideoIdKey = "extend_video_id"; - /// Metadata about the client. 
private readonly VideoGeneratorMetadata _metadata; @@ -102,10 +91,9 @@ public OpenAIVideoGenerator(VideoClient videoClient, string? modelId = null) } /// - public async Task GenerateAsync( + public async Task GenerateAsync( VideoGenerationRequest request, VideoGenerationOptions? options = null, - IProgress? progress = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(request); @@ -115,27 +103,19 @@ public async Task GenerateAsync( string modelId = options?.ModelId ?? _defaultModelId ?? "sora-2"; - // Check for routing keys in AdditionalProperties - string? editVideoId = GetStringAdditionalProperty(options, EditVideoIdKey); - string? extendVideoId = GetStringAdditionalProperty(options, ExtendVideoIdKey); - - // Determine OriginalMedia type (only when no routing keys override the operation) + // Determine OriginalMedia type based on the operation kind DataContent? videoEditContent = null; DataContent? imageReferenceData = null; UriContent? imageReferenceUri = null; - if (editVideoId is null && extendVideoId is null && - request.OriginalMedia is { } originalMedia) + if (request.OperationKind == VideoOperationKind.Create && + request.OriginalMedia is { } createMedia) { - foreach (AIContent media in originalMedia) + foreach (AIContent media in createMedia) { if (media is DataContent dc && dc.Data.Length > 0) { - if (IsVideoMediaType(dc.MediaType)) - { - videoEditContent = dc; - } - else if (IsImageMediaType(dc.MediaType)) + if (IsImageMediaType(dc.MediaType)) { imageReferenceData = dc; } @@ -150,18 +130,31 @@ public async Task GenerateAsync( } } } + else if (request.OperationKind == VideoOperationKind.Edit && + request.SourceVideoId is null && + request.OriginalMedia is { } editMedia) + { + foreach (AIContent media in editMedia) + { + if (media is DataContent dc && dc.Data.Length > 0 && IsVideoMediaType(dc.MediaType)) + { + videoEditContent = dc; + break; + } + } + } - // Route to the appropriate endpoint and create the video generation job + // 
Route to the appropriate endpoint and submit the video generation job RequestOptions reqOpts = new() { CancellationToken = cancellationToken }; ClientResult createResult; - if (extendVideoId is not null) + if (request.OperationKind == VideoOperationKind.Extend && request.SourceVideoId is not null) { // POST /videos/extensions — extend a completed video JsonObject body = new() { ["prompt"] = prompt, - ["video"] = new JsonObject { ["id"] = extendVideoId }, + ["video"] = new JsonObject { ["id"] = request.SourceVideoId }, }; if (options?.Duration is TimeSpan extDuration) @@ -178,13 +171,13 @@ public async Task GenerateAsync( await _videoClient.Pipeline.SendAsync(extendMsg).ConfigureAwait(false); createResult = ClientResult.FromResponse(extendMsg.Response!); } - else if (editVideoId is not null) + else if (request.OperationKind == VideoOperationKind.Edit && request.SourceVideoId is not null) { // POST /videos/edits — edit an existing video by ID JsonObject body = new() { ["prompt"] = prompt, - ["video"] = new JsonObject { ["id"] = editVideoId }, + ["video"] = new JsonObject { ["id"] = request.SourceVideoId }, }; ForwardAdditionalProperties(body, options); @@ -267,70 +260,19 @@ public async Task GenerateAsync( } } - // Parse the creation response to get the video ID and status + // Parse the creation response to get the video ID and initial status using JsonDocument createDoc = JsonDocument.Parse( createResult.GetRawResponse().Content); string videoId = createDoc.RootElement.GetProperty("id").GetString()!; string status = createDoc.RootElement.GetProperty("status").GetString()!; - int? progressPercent = TryGetProgress(createDoc.RootElement); - - progress?.Report(new VideoGenerationProgress(status, progressPercent)); - - // Poll until the video generation is complete - string? 
errorMessage = null; - while (!IsTerminalStatus(status)) - { - await Task.Delay( - _defaultPollingInterval, cancellationToken).ConfigureAwait(false); - - var pollOpts = new RequestOptions { CancellationToken = cancellationToken }; - ClientResult getResult = await _videoClient.GetVideoAsync( - videoId, pollOpts).ConfigureAwait(false); - using JsonDocument statusDoc = JsonDocument.Parse( - getResult.GetRawResponse().Content); - status = statusDoc.RootElement.GetProperty("status").GetString()!; - progressPercent = TryGetProgress(statusDoc.RootElement); - - progress?.Report(new VideoGenerationProgress(status, progressPercent)); - - if (string.Equals(status, "failed", StringComparison.OrdinalIgnoreCase) && - statusDoc.RootElement.TryGetProperty("error", out JsonElement errorEl) && - errorEl.TryGetProperty("message", out JsonElement msgEl)) - { - errorMessage = msgEl.GetString(); - } - } - - if (string.Equals(status, "failed", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException( - errorMessage ?? "Video generation failed."); - } - - // Honor the requested response format. - string contentType = options?.MediaType ?? "video/mp4"; - List contents; - - if (options?.ResponseFormat is VideoGenerationResponseFormat.Uri or - VideoGenerationResponseFormat.Hosted) - { - // Return a URI pointing to the video content endpoint without downloading - // the potentially large video blob. - string baseUrl = _videoClient.Endpoint.ToString().TrimEnd('/'); - var videoUri = new Uri($"{baseUrl}/videos/{videoId}/content"); - contents = [new UriContent(videoUri, contentType)]; - } - else + int? progressPercent = null; + if (createDoc.RootElement.TryGetProperty("progress", out JsonElement progEl) && + progEl.TryGetInt32(out int pct)) { - // Download the completed video content. 
- var dlOpts = new RequestOptions { CancellationToken = cancellationToken }; - ClientResult downloadResult = await _videoClient.DownloadVideoAsync( - videoId, options: dlOpts).ConfigureAwait(false); - BinaryData videoData = downloadResult.GetRawResponse().Content; - contents = [new DataContent(videoData.ToMemory(), contentType)]; + progressPercent = pct; } - return new VideoGenerationResponse(contents); + return new OpenAIVideoGenerationOperation(_videoClient, videoId, status, progressPercent); } /// @@ -348,6 +290,34 @@ void IDisposable.Dispose() // Nothing to dispose. Implementation required for the IVideoGenerator interface. } + /// Uploads a character asset from a video for use in subsequent video generation requests. + internal async Task UploadVideoCharacterAsync( + string name, + DataContent videoContent, + CancellationToken cancellationToken = default) + { + string boundary = $"----MEAI{Guid.NewGuid():N}"; + string contentType = $"multipart/form-data; boundary={boundary}"; + + using var ms = new MemoryStream(); + WriteFormField(ms, boundary, "name", name); + + string fileName = videoContent.Name ?? "character.mp4"; + string mediaType = videoContent.MediaType ?? "video/mp4"; + WriteFilePart(ms, boundary, "video", fileName, mediaType, videoContent.Data); + WriteString(ms, $"--{boundary}--\r\n"); + + using BinaryContent content = BinaryContent.Create(new BinaryData(ms.ToArray())); + RequestOptions reqOpts = new() { CancellationToken = cancellationToken }; + using PipelineMessage message = CreatePipelineRequest( + _videoClient, "/videos/characters", content, contentType, reqOpts); + + await _videoClient.Pipeline.SendAsync(message).ConfigureAwait(false); + + using JsonDocument doc = JsonDocument.Parse(message.Response!.Content); + return doc.RootElement.GetProperty("id").GetString()!; + } + /// Creates a for a POST request to a path not yet exposed by the SDK. 
private static PipelineMessage CreatePipelineRequest( VideoClient videoClient, string path, BinaryContent content, @@ -366,12 +336,6 @@ private static PipelineMessage CreatePipelineRequest( return message; } - /// Returns the string value of an additional property, or if not present. - private static string? GetStringAdditionalProperty(VideoGenerationOptions? options, string key) => - options?.AdditionalProperties is { } props && - props.TryGetValue(key, out object? value) && - value is string s ? s : null; - /// Determines whether the given media type represents a video format. private static bool IsVideoMediaType(string? mediaType) => mediaType is not null && @@ -383,12 +347,7 @@ private static bool IsImageMediaType(string? mediaType) => mediaType is null || mediaType.StartsWith("image/", StringComparison.OrdinalIgnoreCase); - /// Determines whether the given key is a routing key consumed by this generator. - private static bool IsRoutingKey(string key) => - string.Equals(key, EditVideoIdKey, StringComparison.OrdinalIgnoreCase) || - string.Equals(key, ExtendVideoIdKey, StringComparison.OrdinalIgnoreCase); - - /// Forwards additional properties to the JSON body, skipping routing keys. + /// Forwards additional properties to the JSON body. private static void ForwardAdditionalProperties(JsonObject body, VideoGenerationOptions? options) { if (options?.AdditionalProperties is not { } props) @@ -398,26 +357,10 @@ private static void ForwardAdditionalProperties(JsonObject body, VideoGeneration foreach (KeyValuePair prop in props) { - if (!IsRoutingKey(prop.Key)) - { - body[prop.Key] = ToJsonNode(prop.Value); - } + body[prop.Key] = ToJsonNode(prop.Value); } } - /// Determines whether the given status indicates the video generation job has finished. 
- private static bool IsTerminalStatus(string status) => - string.Equals(status, "completed", StringComparison.OrdinalIgnoreCase) || - string.Equals(status, "failed", StringComparison.OrdinalIgnoreCase) || - string.Equals(status, "expired", StringComparison.OrdinalIgnoreCase); - - /// Tries to read the integer progress field from a video job JSON element. - private static int? TryGetProgress(JsonElement element) => - element.TryGetProperty("progress", out JsonElement el) && - el.TryGetInt32(out int val) - ? val - : null; - /// Serializes a to UTF-8 bytes without an intermediate string allocation. private static BinaryData SerializeJsonToUtf8(JsonObject body) { diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs index aa5ae461d1b..72e673d8e12 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs @@ -112,8 +112,8 @@ protected override void Dispose(bool disposing) base.GetService(serviceType, serviceKey); /// - public async override Task GenerateAsync( - VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress? progress = null, CancellationToken cancellationToken = default) + public async override Task GenerateAsync( + VideoGenerationRequest request, VideoGenerationOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(request); @@ -121,12 +121,12 @@ public async override Task GenerateAsync( Stopwatch? stopwatch = _operationDurationHistogram.Enabled ? Stopwatch.StartNew() : null; string? requestModelId = options?.ModelId ?? _defaultModelId; - VideoGenerationResponse? response = null; + VideoGenerationOperation? operation = null; Exception? 
error = null; try { - response = await base.GenerateAsync(request, options, progress, cancellationToken).ConfigureAwait(false); - return response; + operation = await base.GenerateAsync(request, options, cancellationToken).ConfigureAwait(false); + return operation; } catch (Exception ex) { @@ -135,7 +135,7 @@ public async override Task GenerateAsync( } finally { - TraceResponse(activity, requestModelId, response, error, stopwatch); + TraceResponse(activity, requestModelId, operation, error, stopwatch); } } @@ -223,11 +223,11 @@ public async override Task GenerateAsync( return activity; } - /// Adds video generation response information to the activity. + /// Adds video generation operation information to the activity. private void TraceResponse( Activity? activity, string? requestModelId, - VideoGenerationResponse? response, + VideoGenerationOperation? operation, Exception? error, Stopwatch? stopwatch) { @@ -256,18 +256,15 @@ private void TraceResponse( } } - if (response is not null) + if (operation is not null) { - if (EnableSensitiveData && - response.Contents is { Count: > 0 } contents && - activity is { IsAllDataRequested: true }) + if (activity is { IsAllDataRequested: true }) { - _ = activity.AddTag( - OpenTelemetryConsts.GenAI.Output.Messages, - OpenTelemetryChatClient.SerializeChatMessages([new(ChatRole.Assistant, contents)])); + _ = activity.AddTag("gen_ai.operation.id", operation.OperationId); + _ = activity.AddTag("gen_ai.operation.status", operation.Status); } - if (response.Usage is { } usage) + if (operation.Usage is { } usage) { if (_tokenUsageHistogram.Enabled) { diff --git a/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGenerator.cs index 9ce0be59397..21522420b20 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Video/ConfigureOptionsVideoGenerator.cs @@ -36,10 
+36,10 @@ public ConfigureOptionsVideoGenerator(IVideoGenerator innerGenerator, Action - public override async Task GenerateAsync( - VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress? progress = null, CancellationToken cancellationToken = default) + public override async Task GenerateAsync( + VideoGenerationRequest request, VideoGenerationOptions? options = null, CancellationToken cancellationToken = default) { - return await base.GenerateAsync(request, Configure(options), progress, cancellationToken); + return await base.GenerateAsync(request, Configure(options), cancellationToken); } /// Creates and configures the to pass along to the inner generator. diff --git a/src/Libraries/Microsoft.Extensions.AI/Video/LoggingVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/Video/LoggingVideoGenerator.cs index 67c879aa78a..f57762c7b49 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Video/LoggingVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Video/LoggingVideoGenerator.cs @@ -3,7 +3,6 @@ using System; using System.Diagnostics.CodeAnalysis; -using System.Linq; using System.Text.Json; using System.Threading; using System.Threading.Tasks; @@ -55,8 +54,8 @@ public JsonSerializerOptions JsonSerializerOptions } /// - public override async Task GenerateAsync( - VideoGenerationRequest request, VideoGenerationOptions? options = null, IProgress? progress = null, CancellationToken cancellationToken = default) + public override async Task GenerateAsync( + VideoGenerationRequest request, VideoGenerationOptions? 
options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(request); @@ -74,13 +73,13 @@ public override async Task GenerateAsync( try { - var response = await base.GenerateAsync(request, options, progress, cancellationToken); + var operation = await base.GenerateAsync(request, options, cancellationToken); if (_logger.IsEnabled(LogLevel.Debug)) { - if (_logger.IsEnabled(LogLevel.Trace) && response.Contents.All(c => c is not DataContent)) + if (_logger.IsEnabled(LogLevel.Trace)) { - LogCompletedSensitive(nameof(GenerateAsync), AsJson(response)); + LogCompletedSensitive(nameof(GenerateAsync), $"OperationId={operation.OperationId}, Status={operation.Status}"); } else { @@ -88,7 +87,7 @@ public override async Task GenerateAsync( } } - return response; + return operation; } catch (OperationCanceledException) { @@ -113,8 +112,8 @@ public override async Task GenerateAsync( [LoggerMessage(LogLevel.Debug, "{MethodName} completed.")] private partial void LogCompleted(string methodName); - [LoggerMessage(LogLevel.Trace, "{MethodName} completed: {VideoGenerationResponse}.")] - private partial void LogCompletedSensitive(string methodName, string videoGenerationResponse); + [LoggerMessage(LogLevel.Trace, "{MethodName} completed: {VideoGenerationOperation}.")] + private partial void LogCompletedSensitive(string methodName, string videoGenerationOperation); [LoggerMessage(LogLevel.Debug, "{MethodName} canceled.")] private partial void LogInvocationCanceled(string methodName); diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerationOperation.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerationOperation.cs new file mode 100644 index 00000000000..0419a54e394 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerationOperation.cs @@ -0,0 +1,51 @@ +// Licensed to the .NET Foundation under one or more agreements. 
+// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Extensions.AI; + +public class TestVideoGenerationOperation : VideoGenerationOperation +{ + private string? _operationId; + private string? _status; + private int? _percentComplete; + + public TestVideoGenerationOperation( + string? operationId = "test-op-id", + string? status = "completed", + int? percentComplete = 100) + { + _operationId = operationId; + _status = status; + _percentComplete = percentComplete; + } + + public override string? OperationId => _operationId; + + public override string? Status => _status; + + public override int? PercentComplete => _percentComplete; + + public override bool IsCompleted => + string.Equals(_status, "completed", StringComparison.OrdinalIgnoreCase) || + string.Equals(_status, "failed", StringComparison.OrdinalIgnoreCase); + + public override string? FailureReason => null; + + public IList? Contents { get; set; } + + public override Task UpdateAsync(CancellationToken cancellationToken = default) => Task.CompletedTask; + + public override Task WaitForCompletionAsync( + IProgress? progress = null, + CancellationToken cancellationToken = default) => Task.CompletedTask; + + public override Task> GetContentsAsync( + VideoGenerationOptions? options = null, + CancellationToken cancellationToken = default) + => Task.FromResult>(Contents ?? new List()); +} diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerator.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerator.cs index 9a52ed6c1ed..e32dd595166 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerator.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestVideoGenerator.cs @@ -16,7 +16,7 @@ public TestVideoGenerator() public IServiceProvider? 
Services { get; set; } - public Func>? GenerateVideosAsyncCallback { get; set; } + public Func>? GenerateVideosAsyncCallback { get; set; } public Func GetServiceCallback { get; set; } @@ -25,14 +25,13 @@ public TestVideoGenerator() private object? DefaultGetServiceCallback(Type serviceType, object? serviceKey) => serviceType is not null && serviceKey is null && serviceType.IsInstanceOfType(this) ? this : null; - public Task GenerateAsync( + public Task GenerateAsync( VideoGenerationRequest request, VideoGenerationOptions? options = null, - IProgress? progress = null, CancellationToken cancellationToken = default) { return GenerateVideosAsyncCallback?.Invoke(request, options, cancellationToken) ?? - Task.FromResult(new VideoGenerationResponse()); + Task.FromResult(new TestVideoGenerationOperation()); } public object? GetService(Type serviceType, object? serviceKey = null) diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/DelegatingVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/DelegatingVideoGeneratorTests.cs index 3391ceec4d7..fa06547f4f2 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/DelegatingVideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/DelegatingVideoGeneratorTests.cs @@ -18,15 +18,15 @@ public void RequiresInnerVideoGenerator() [Fact] public async Task GenerateVideosAsyncDefaultsToInnerGeneratorAsync() { - var expectedResponse = new VideoGenerationResponse(); + var expectedOperation = new TestVideoGenerationOperation(); using var inner = new TestVideoGenerator { - GenerateVideosAsyncCallback = (request, options, ct) => Task.FromResult(expectedResponse) + GenerateVideosAsyncCallback = (request, options, ct) => Task.FromResult(expectedOperation) }; using var delegating = new TestDelegatingVideoGenerator(inner); var result = await delegating.GenerateAsync(new VideoGenerationRequest("Test")); - 
Assert.Same(expectedResponse, result); + Assert.Same(expectedOperation, result); } [Fact] diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationResponseTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationResponseTests.cs deleted file mode 100644 index d5d51b929b7..00000000000 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationResponseTests.cs +++ /dev/null @@ -1,79 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System.Collections.Generic; -using System.Text.Json; -using Xunit; - -namespace Microsoft.Extensions.AI; - -public class VideoGenerationResponseTests -{ - [Fact] - public void Constructor_Defaults() - { - var response = new VideoGenerationResponse(); - Assert.NotNull(response.Contents); - Assert.Empty(response.Contents); - Assert.Null(response.RawRepresentation); - Assert.Null(response.Usage); - } - - [Fact] - public void Constructor_WithContents() - { - var contents = new List { new DataContent("dGVzdA=="u8.ToArray(), "video/mp4") }; - var response = new VideoGenerationResponse(contents); - Assert.Same(contents, response.Contents); - } - - [Fact] - public void Contents_NullSetter_ReturnsEmptyList() - { - var response = new VideoGenerationResponse { Contents = null! 
}; - Assert.NotNull(response.Contents); - Assert.Empty(response.Contents); - } - - [Fact] - public void RawRepresentation_Roundtrip() - { - var raw = new object(); - var response = new VideoGenerationResponse { RawRepresentation = raw }; - Assert.Same(raw, response.RawRepresentation); - } - - [Fact] - public void Usage_Roundtrip() - { - var usage = new UsageDetails { InputTokenCount = 100, OutputTokenCount = 200 }; - var response = new VideoGenerationResponse { Usage = usage }; - Assert.Same(usage, response.Usage); - Assert.Equal(100, response.Usage.InputTokenCount); - Assert.Equal(200, response.Usage.OutputTokenCount); - } - - [Fact] - public void JsonSerialization_WithUriContent() - { - var response = new VideoGenerationResponse( - [new UriContent("https://example.com/video.mp4", "video/mp4")]); - - string json = JsonSerializer.Serialize(response, AIJsonUtilities.DefaultOptions); - var deserialized = JsonSerializer.Deserialize(json, AIJsonUtilities.DefaultOptions); - - Assert.NotNull(deserialized); - Assert.Single(deserialized!.Contents); - } - - [Fact] - public void JsonSerialization_EmptyResponse() - { - var response = new VideoGenerationResponse(); - string json = JsonSerializer.Serialize(response, AIJsonUtilities.DefaultOptions); - var deserialized = JsonSerializer.Deserialize(json, AIJsonUtilities.DefaultOptions); - - Assert.NotNull(deserialized); - Assert.Empty(deserialized!.Contents); - } -} diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs index 1298346b27c..54f00720575 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. 
using System; +using System.Collections.Generic; using System.Threading.Tasks; using Xunit; @@ -54,7 +55,7 @@ public void GetRequiredService_Generic_ServiceNotAvailable_Throws() public async Task GenerateVideosAsync_NullGenerator_Throws() { await Assert.ThrowsAsync("generator", () => - ((IVideoGenerator)null!).GenerateVideosAsync("Test")); + ((IVideoGenerator)null!).GenerateVideoAsync("Test")); } [Fact] @@ -62,7 +63,7 @@ public async Task GenerateVideosAsync_NullPrompt_Throws() { using var generator = new TestVideoGenerator(); await Assert.ThrowsAsync("prompt", () => - generator.GenerateVideosAsync(null!)); + generator.GenerateVideoAsync(null!)); } [Fact] @@ -74,11 +75,11 @@ public async Task GenerateVideosAsync_CallsGenerateAsync() GenerateVideosAsyncCallback = (request, options, ct) => { capturedRequest = request; - return Task.FromResult(new VideoGenerationResponse()); + return Task.FromResult(new TestVideoGenerationOperation()); } }; - await generator.GenerateVideosAsync("A cat video"); + await generator.GenerateVideoAsync("A cat video"); Assert.NotNull(capturedRequest); Assert.Equal("A cat video", capturedRequest!.Prompt); @@ -89,7 +90,7 @@ public async Task GenerateVideosAsync_CallsGenerateAsync() public async Task EditVideosAsync_NullGenerator_Throws() { await Assert.ThrowsAsync("generator", () => - ((IVideoGenerator)null!).EditVideosAsync([], "prompt")); + ((IVideoGenerator)null!).EditVideoAsync(Array.Empty(), "prompt")); } [Fact] @@ -97,7 +98,7 @@ public async Task EditVideosAsync_NullOriginalMedia_Throws() { using var generator = new TestVideoGenerator(); await Assert.ThrowsAsync("originalMedia", () => - generator.EditVideosAsync(null!, "prompt")); + generator.EditVideoAsync((IEnumerable)null!, "prompt")); } [Fact] @@ -105,7 +106,7 @@ public async Task EditVideosAsync_NullPrompt_Throws() { using var generator = new TestVideoGenerator(); await Assert.ThrowsAsync("prompt", () => - generator.EditVideosAsync([], null!)); + 
generator.EditVideoAsync(Array.Empty(), null!)); } [Fact] @@ -117,7 +118,7 @@ public async Task EditVideoAsync_DataContent_CallsGenerateAsync() GenerateVideosAsyncCallback = (request, options, ct) => { capturedRequest = request; - return Task.FromResult(new VideoGenerationResponse()); + return Task.FromResult(new TestVideoGenerationOperation()); } }; @@ -138,7 +139,7 @@ public async Task EditVideoAsync_ByteArray_CallsGenerateAsync() GenerateVideosAsyncCallback = (request, options, ct) => { capturedRequest = request; - return Task.FromResult(new VideoGenerationResponse()); + return Task.FromResult(new TestVideoGenerationOperation()); } }; diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs index 1af60e0ab55..6b829671e5f 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs @@ -29,28 +29,28 @@ public void GetService_WithoutServiceKey_CallsCallback() public async Task GenerateVideosAsync_CallsCallback() { var expectedRequest = new VideoGenerationRequest("Test prompt"); - var expectedResponse = new VideoGenerationResponse(); + var expectedOperation = new TestVideoGenerationOperation(); using var generator = new TestVideoGenerator { GenerateVideosAsyncCallback = (request, options, ct) => { Assert.Same(expectedRequest, request); - return Task.FromResult(expectedResponse); + return Task.FromResult(expectedOperation); } }; var result = await generator.GenerateAsync(expectedRequest); - Assert.Same(expectedResponse, result); + Assert.Same(expectedOperation, result); } [Fact] - public async Task GenerateVideosAsync_NoCallback_ReturnsEmptyResponse() + public async Task GenerateVideosAsync_NoCallback_ReturnsDefaultOperation() { using var generator = new TestVideoGenerator(); var result = await 
generator.GenerateAsync(new VideoGenerationRequest("Test")); Assert.NotNull(result); - Assert.Empty(result.Contents); + Assert.True(result.IsCompleted); } [Fact] @@ -92,7 +92,7 @@ public async Task GenerateVideosAsync_WithOptions_PassesThroughCorrectly() GenerateVideosAsyncCallback = (request, opts, ct) => { capturedOptions = opts; - return Task.FromResult(new VideoGenerationResponse()); + return Task.FromResult(new TestVideoGenerationOperation()); } }; @@ -121,7 +121,7 @@ public async Task GenerateVideosAsync_WithEditRequest_PassesThroughCorrectly() GenerateVideosAsyncCallback = (req, opts, ct) => { capturedRequest = req; - return Task.FromResult(new VideoGenerationResponse()); + return Task.FromResult(new TestVideoGenerationOperation()); } }; diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VideoGeneratorIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VideoGeneratorIntegrationTests.cs index dc9829bf7e0..d1eb0bd68ba 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VideoGeneratorIntegrationTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VideoGeneratorIntegrationTests.cs @@ -38,12 +38,18 @@ public virtual async Task GenerateVideosAsync_SingleVideoGeneration() Count = 1 }; - var response = await _generator.GenerateVideosAsync("A simple animation of a bouncing ball", options); + var operation = await _generator.GenerateVideoAsync("A simple animation of a bouncing ball", options); - Assert.NotNull(response); - Assert.NotEmpty(response.Contents); + Assert.NotNull(operation); + Assert.NotNull(operation.OperationId); - var content = Assert.Single(response.Contents); + await operation.WaitForCompletionAsync(); + Assert.True(operation.IsCompleted); + + var contents = await operation.GetContentsAsync(); + Assert.NotEmpty(contents); + + var content = Assert.Single(contents); switch (content) { case UriContent uc: @@ -71,13 +77,18 @@ public virtual async Task 
GenerateVideosAsync_MultipleVideos() Count = 2 }; - var response = await _generator.GenerateVideosAsync("A cat sitting on a table", options); + var operation = await _generator.GenerateVideoAsync("A cat sitting on a table", options); + + Assert.NotNull(operation); + + await operation.WaitForCompletionAsync(); + Assert.True(operation.IsCompleted); - Assert.NotNull(response); - Assert.NotEmpty(response.Contents); - Assert.Equal(2, response.Contents.Count); + var contents = await operation.GetContentsAsync(); + Assert.NotEmpty(contents); + Assert.Equal(2, contents.Count); - foreach (var content in response.Contents) + foreach (var content in contents) { Assert.IsType(content); var dataContent = (DataContent)content; diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Microsoft.Extensions.AI.Tests.csproj b/test/Libraries/Microsoft.Extensions.AI.Tests/Microsoft.Extensions.AI.Tests.csproj index eada870401c..8abe55bb8b9 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/Microsoft.Extensions.AI.Tests.csproj +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Microsoft.Extensions.AI.Tests.csproj @@ -26,6 +26,7 @@ + diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/ConfigureOptionsVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/ConfigureOptionsVideoGeneratorTests.cs index 8ac11f557b9..8c506a88354 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/ConfigureOptionsVideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/ConfigureOptionsVideoGeneratorTests.cs @@ -28,7 +28,7 @@ public async Task ConfigureCallback_ReceivesClonedOptions() GenerateVideosAsyncCallback = (request, options, ct) => { capturedOptions = options; - return Task.FromResult(new VideoGenerationResponse()); + return Task.FromResult(new TestVideoGenerationOperation()); } }; @@ -55,7 +55,7 @@ public async Task ConfigureCallback_WithNullOptions_CreatesNewInstance() GenerateVideosAsyncCallback = (request, options, ct) => { 
capturedOptions = options; - return Task.FromResult(new VideoGenerationResponse()); + return Task.FromResult(new TestVideoGenerationOperation()); } }; diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs index 4192e7dc3f7..3126179019f 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs @@ -56,7 +56,7 @@ public async Task GenerateVideosAsync_LogsInvocationAndCompletion(LogLevel level { GenerateVideosAsyncCallback = (request, options, cancellationToken) => { - return Task.FromResult(new VideoGenerationResponse()); + return Task.FromResult(new TestVideoGenerationOperation()); }, }; @@ -104,7 +104,7 @@ public async Task GenerateVideosAsync_WithOriginalMedia_LogsInvocationAndComplet { GenerateVideosAsyncCallback = (request, options, cancellationToken) => { - return Task.FromResult(new VideoGenerationResponse()); + return Task.FromResult(new TestVideoGenerationOperation()); } }; diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs index 951489f157f..8c176a0c3bf 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs @@ -40,14 +40,8 @@ public async Task ExpectedInformationLogged_Async(bool enableSensitiveData) { await Task.Yield(); - return new() + return new TestVideoGenerationOperation { - Contents = - [ - new UriContent("http://example/output.mp4", "video/mp4"), - new DataContent(new byte[] { 1, 2, 3, 4 }, "video/mp4") { Name = "moreOutput.mp4" }, - ], - Usage = new() { InputTokenCount = 10, @@ -119,6 +113,11 @@ public async Task 
ExpectedInformationLogged_Async(bool enableSensitiveData) Assert.True(activity.Duration.TotalMilliseconds > 0); var tags = activity.Tags.ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + + // Operation metadata is always recorded + Assert.Equal("test-op-id", activity.GetTagItem("gen_ai.operation.id")); + Assert.Equal("completed", activity.GetTagItem("gen_ai.operation.status")); + if (enableSensitiveData) { Assert.Equal(ReplaceWhitespace(""" @@ -140,33 +139,10 @@ public async Task ExpectedInformationLogged_Async(bool enableSensitiveData) } ] """), ReplaceWhitespace(tags["gen_ai.input.messages"])); - - Assert.Equal(ReplaceWhitespace(""" - [ - { - "role": "assistant", - "parts": [ - { - "type": "uri", - "uri": "http://example/output.mp4", - "mime_type": "video/mp4", - "modality": "video" - }, - { - "type": "blob", - "content": "AQIDBA==", - "mime_type": "video/mp4", - "modality": "video" - } - ] - } - ] - """), ReplaceWhitespace(tags["gen_ai.output.messages"])); } else { Assert.False(tags.ContainsKey("gen_ai.input.messages")); - Assert.False(tags.ContainsKey("gen_ai.output.messages")); } static string ReplaceWhitespace(string? input) => Regex.Replace(input ?? "", @"\s+", " ").Trim(); From afd5d29d3a002933dd774ee6ad8ce1f2d628b3ee Mon Sep 17 00:00:00 2001 From: "Eric St. 
John" Date: Fri, 20 Mar 2026 17:58:26 -0700 Subject: [PATCH 04/10] Address feedback --- .../OpenAIVideoGenerator.cs | 23 +++------ .../Utf8JsonBinaryContent.cs | 50 +++++++++++++++++++ ...ratorBuilderServiceCollectionExtensions.cs | 20 ++++---- ...ratorBuilderServiceCollectionExtensions.cs | 12 ++--- 4 files changed, 74 insertions(+), 31 deletions(-) create mode 100644 src/Libraries/Microsoft.Extensions.AI.OpenAI/Utf8JsonBinaryContent.cs diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs index b790b0cd874..f6ac7ba470c 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs @@ -163,8 +163,7 @@ request.SourceVideoId is null && } ForwardAdditionalProperties(body, options); - using BinaryContent extendContent = BinaryContent.Create( - SerializeJsonToUtf8(body)); + using var extendContent = CreateJsonContent(body); using PipelineMessage extendMsg = CreatePipelineRequest( _videoClient, "/videos/extensions", extendContent, "application/json", reqOpts); @@ -181,8 +180,7 @@ request.SourceVideoId is null && }; ForwardAdditionalProperties(body, options); - using BinaryContent editContent = BinaryContent.Create( - SerializeJsonToUtf8(body)); + using var editContent = CreateJsonContent(body); using PipelineMessage editMsg = CreatePipelineRequest( _videoClient, "/videos/edits", editContent, "application/json", reqOpts); @@ -253,8 +251,7 @@ request.SourceVideoId is null && } else { - using BinaryContent content = BinaryContent.Create( - SerializeJsonToUtf8(requestBody)); + using var content = CreateJsonContent(requestBody); createResult = await _videoClient.CreateVideoAsync( content, "application/json", reqOpts).ConfigureAwait(false); } @@ -361,16 +358,12 @@ private static void ForwardAdditionalProperties(JsonObject body, VideoGeneration } } - /// Serializes a to UTF-8 bytes 
without an intermediate string allocation. - private static BinaryData SerializeJsonToUtf8(JsonObject body) + /// Creates a containing the serialized JSON object. + private static Utf8JsonBinaryContent CreateJsonContent(JsonObject body) { - using var ms = new MemoryStream(); - using (var writer = new Utf8JsonWriter(ms)) - { - body.WriteTo(writer); - } - - return new BinaryData(ms.ToArray()); + var content = new Utf8JsonBinaryContent(); + body.WriteTo(content.JsonWriter); + return content; } /// Builds a multipart/form-data body containing the form fields and a file part. diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/Utf8JsonBinaryContent.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/Utf8JsonBinaryContent.cs new file mode 100644 index 00000000000..49a4244d0d5 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/Utf8JsonBinaryContent.cs @@ -0,0 +1,50 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.ClientModel; +using System.IO; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Extensions.AI; + +/// A that writes UTF-8 JSON directly to the pipeline stream. 
+internal sealed class Utf8JsonBinaryContent : BinaryContent +{ + private readonly MemoryStream _stream = new(); + private readonly BinaryContent _content; + + public Utf8JsonBinaryContent() + { + _content = Create(_stream); + JsonWriter = new Utf8JsonWriter(_stream); + } + + public Utf8JsonWriter JsonWriter { get; } + + public override async Task WriteToAsync(Stream stream, CancellationToken cancellationToken = default) + { + await JsonWriter.FlushAsync(cancellationToken).ConfigureAwait(false); + await _content.WriteToAsync(stream, cancellationToken).ConfigureAwait(false); + } + + public override void WriteTo(Stream stream, CancellationToken cancellationToken = default) + { + JsonWriter.Flush(); + _content.WriteTo(stream, cancellationToken); + } + + public override bool TryComputeLength(out long length) + { + length = JsonWriter.BytesCommitted + JsonWriter.BytesPending; + return true; + } + + public override void Dispose() + { + JsonWriter.Dispose(); + _content.Dispose(); + _stream.Dispose(); + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI/Image/ImageGeneratorBuilderServiceCollectionExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/Image/ImageGeneratorBuilderServiceCollectionExtensions.cs index 3413e9957cb..ab615e03e15 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Image/ImageGeneratorBuilderServiceCollectionExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Image/ImageGeneratorBuilderServiceCollectionExtensions.cs @@ -13,26 +13,26 @@ namespace Microsoft.Extensions.DependencyInjection; [Experimental(DiagnosticIds.Experiments.AIImageGeneration, UrlFormat = DiagnosticIds.UrlFormat)] public static class ImageGeneratorBuilderServiceCollectionExtensions { - /// Registers a singleton in the . + /// Registers an in the . /// The to which the generator should be added. /// The inner that represents the underlying backend. /// The service lifetime for the generator. Defaults to . 
/// An that can be used to build a pipeline around the inner generator. /// or is . - /// The generator is registered as a singleton service. + /// The generator is registered with the specified . public static ImageGeneratorBuilder AddImageGenerator( this IServiceCollection serviceCollection, IImageGenerator innerGenerator, ServiceLifetime lifetime = ServiceLifetime.Singleton) => AddImageGenerator(serviceCollection, _ => innerGenerator, lifetime); - /// Registers a singleton in the . + /// Registers an in the . /// The to which the generator should be added. /// A callback that produces the inner that represents the underlying backend. /// The service lifetime for the generator. Defaults to . /// An that can be used to build a pipeline around the inner generator. /// or is . - /// The generator is registered as a singleton service. + /// The generator is registered with the specified . public static ImageGeneratorBuilder AddImageGenerator( this IServiceCollection serviceCollection, Func innerGeneratorFactory, @@ -46,14 +46,14 @@ public static ImageGeneratorBuilder AddImageGenerator( return builder; } - /// Registers a keyed singleton in the . + /// Registers a keyed in the . /// The to which the generator should be added. /// The key with which to associate the generator. /// The inner that represents the underlying backend. /// The service lifetime for the generator. Defaults to . /// An that can be used to build a pipeline around the inner generator. - /// , , or is . - /// The generator is registered as a scoped service. + /// or is . + /// The generator is registered with the specified . public static ImageGeneratorBuilder AddKeyedImageGenerator( this IServiceCollection serviceCollection, object? 
serviceKey, @@ -61,14 +61,14 @@ public static ImageGeneratorBuilder AddKeyedImageGenerator( ServiceLifetime lifetime = ServiceLifetime.Singleton) => AddKeyedImageGenerator(serviceCollection, serviceKey, _ => innerGenerator, lifetime); - /// Registers a keyed singleton in the . + /// Registers a keyed in the . /// The to which the generator should be added. /// The key with which to associate the generator. /// A callback that produces the inner that represents the underlying backend. /// The service lifetime for the generator. Defaults to . /// An that can be used to build a pipeline around the inner generator. - /// , , or is . - /// The generator is registered as a scoped service. + /// or is . + /// The generator is registered with the specified . public static ImageGeneratorBuilder AddKeyedImageGenerator( this IServiceCollection serviceCollection, object? serviceKey, diff --git a/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderServiceCollectionExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderServiceCollectionExtensions.cs index 202d71cadb1..23514815e2a 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderServiceCollectionExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Video/VideoGeneratorBuilderServiceCollectionExtensions.cs @@ -13,26 +13,26 @@ namespace Microsoft.Extensions.DependencyInjection; [Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] public static class VideoGeneratorBuilderServiceCollectionExtensions { - /// Registers a singleton in the . + /// Registers an in the . /// The to which the generator should be added. /// The inner that represents the underlying backend. /// The service lifetime for the generator. Defaults to . /// An that can be used to build a pipeline around the inner generator. /// or is . - /// The generator is registered as a singleton service. + /// The generator is registered with the specified . 
public static VideoGeneratorBuilder AddVideoGenerator( this IServiceCollection serviceCollection, IVideoGenerator innerGenerator, ServiceLifetime lifetime = ServiceLifetime.Singleton) => AddVideoGenerator(serviceCollection, _ => innerGenerator, lifetime); - /// Registers a singleton in the . + /// Registers an in the . /// The to which the generator should be added. /// A callback that produces the inner that represents the underlying backend. /// The service lifetime for the generator. Defaults to . /// An that can be used to build a pipeline around the inner generator. /// or is . - /// The generator is registered as a singleton service. + /// The generator is registered with the specified . public static VideoGeneratorBuilder AddVideoGenerator( this IServiceCollection serviceCollection, Func innerGeneratorFactory, @@ -46,7 +46,7 @@ public static VideoGeneratorBuilder AddVideoGenerator( return builder; } - /// Registers a keyed singleton in the . + /// Registers a keyed in the . /// The to which the generator should be added. /// The key with which to associate the generator. /// The inner that represents the underlying backend. @@ -61,7 +61,7 @@ public static VideoGeneratorBuilder AddKeyedVideoGenerator( ServiceLifetime lifetime = ServiceLifetime.Singleton) => AddKeyedVideoGenerator(serviceCollection, serviceKey, _ => innerGenerator, lifetime); - /// Registers a keyed singleton in the . + /// Registers a keyed in the . /// The to which the generator should be added. /// The key with which to associate the generator. /// A callback that produces the inner that represents the underlying backend. From 8d17e11ed09c5ed1bb1f386284f876c38e06a7d3 Mon Sep 17 00:00:00 2001 From: "Eric St. 
John" Date: Mon, 23 Mar 2026 09:43:17 -0700 Subject: [PATCH 05/10] Register VideoGenerationOptions for serialization --- .../Utilities/AIJsonUtilities.Defaults.cs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs index 6feffa455c0..4f46189aac3 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs @@ -159,6 +159,9 @@ private static JsonSerializerOptions CreateDefaultOptions() [JsonSerializable(typeof(ImageGenerationOptions))] [JsonSerializable(typeof(ImageGenerationResponse))] + // IVideoGenerator + [JsonSerializable(typeof(VideoGenerationOptions))] + // IHostedFileClient [JsonSerializable(typeof(HostedFileClientOptions))] [JsonSerializable(typeof(HostedFileClientMetadata))] From 1ea78761fc6d1ec106e480570a752a225b4083f0 Mon Sep 17 00:00:00 2001 From: "Eric St. 
John" Date: Mon, 23 Mar 2026 19:29:34 -0700 Subject: [PATCH 06/10] Expand demo, fix a few issues with OpenAI handler, add tests --- samples/VideoGenerationPOC/Program.cs | 353 +++++++++++------- samples/VideoGenerationPOC/demo-dotbot.ps1 | 244 ++++++++++++ .../VideoGenerationPOC/my-dotnet-bot-mod.png | Bin 0 -> 124092 bytes .../Microsoft.Extensions.AI.OpenAI.csproj | 1 + .../OpenAIClientExtensions.cs | 2 +- .../OpenAIVideoGenerator.cs | 54 ++- .../OpenAIVideoGeneratorTests.cs | 186 +++++++++ 7 files changed, 689 insertions(+), 151 deletions(-) create mode 100644 samples/VideoGenerationPOC/demo-dotbot.ps1 create mode 100644 samples/VideoGenerationPOC/my-dotnet-bot-mod.png diff --git a/samples/VideoGenerationPOC/Program.cs b/samples/VideoGenerationPOC/Program.cs index f0c6f1e2023..87db81f185f 100644 --- a/samples/VideoGenerationPOC/Program.cs +++ b/samples/VideoGenerationPOC/Program.cs @@ -1,13 +1,17 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
-// Video Generation POC - Microsoft.Extensions.AI -// Usage: set OPENAI_API_KEY environment variable, then run: -// dotnet run -- "A cat playing piano" -// dotnet run -- "She turns and smiles" --input reference.jpg -// dotnet run -- "Change the sky to sunset" --edit video_abc123 -// dotnet run -- "Continue the scene" --extend video_abc123 -// dotnet run -- "A tracking shot of Mossy" --character char_abc123 +// Video Generation POC — Microsoft.Extensions.AI general-purpose CLI +// +// Usage examples: +// dotnet run -- generate "A cat playing piano" +// dotnet run -- generate "She turns and smiles" --input reference.jpg +// dotnet run -- generate "A tracking shot of DotBot" --character char_abc123 +// dotnet run -- upload-character DotBot --input clip.mp4 +// dotnet run -- edit "Change the sky to sunset" --video video_abc123 +// dotnet run -- extend "Continue the scene" --video video_abc123 +// +// All commands print machine-parseable lines (OPERATION_ID, CHARACTER_ID) for scripting. using System.CommandLine; using System.Drawing; @@ -16,133 +20,197 @@ using Microsoft.Extensions.Logging; using OpenAI; -var promptArg = new Argument("prompt", () => "A serene lake at sunset with gentle ripples", "Text prompt describing the video to generate."); -var modelOption = new Option("--model", () => "sora-2", "Model ID to use for video generation."); -var outputOption = new Option("--output", () => $"video_{DateTime.Now:yyyyMMdd_HHmmss}.mp4", "Output file path for the generated video."); +// ── Shared options ───────────────────────────────────────────────────────── +var modelOption = new Option("--model", () => "sora-2", "Model ID."); +var outputOption = new Option("--output", "Output file path (.mp4). 
Omit for URI-only display."); +var durationOption = new Option("--duration", "Duration in seconds."); +var widthOption = new Option("--width", () => 1280, "Video width."); +var heightOption = new Option("--height", () => 720, "Video height."); +var formatOption = new Option("--format", () => "data", "Response format: data or uri."); + +// ── generate ─────────────────────────────────────────────────────────────── +var generatePromptArg = new Argument("prompt", "Text prompt describing the video to generate."); var inputOption = new Option("--input", "Input file(s) — images for image-to-video, or a video for editing.") { AllowMultipleArgumentsPerToken = true }; -var editOption = new Option("--edit", "Video ID of an existing generation to edit (POST /videos/edits)."); -var extendOption = new Option("--extend", "Video ID of a completed video to extend (POST /videos/extensions)."); -var characterOption = new Option("--character", "Character ID(s) to include in the generation.") { AllowMultipleArgumentsPerToken = true }; +var characterOption = new Option("--character", "Character ID(s) to include.") { AllowMultipleArgumentsPerToken = true }; -var rootCommand = new RootCommand("Video Generation POC — demonstrates Microsoft.Extensions.AI video generation with OpenAI.") +var generateCommand = new Command("generate", "Generate a new video from a text prompt (optionally with input images and characters).") { - promptArg, - modelOption, - outputOption, - inputOption, - editOption, - extendOption, - characterOption, + generatePromptArg, modelOption, outputOption, inputOption, characterOption, durationOption, widthOption, heightOption, formatOption, }; -rootCommand.SetHandler(async (context) => +generateCommand.SetHandler(async (context) => { - string prompt = context.ParseResult.GetValueForArgument(promptArg); + string prompt = context.ParseResult.GetValueForArgument(generatePromptArg); string model = context.ParseResult.GetValueForOption(modelOption)!; - string outputPath = 
context.ParseResult.GetValueForOption(outputOption)!; + string? outputPath = context.ParseResult.GetValueForOption(outputOption); string[] inputPaths = context.ParseResult.GetValueForOption(inputOption) ?? []; - string? editVideoId = context.ParseResult.GetValueForOption(editOption); - string? extendVideoId = context.ParseResult.GetValueForOption(extendOption); string[] characterIds = context.ParseResult.GetValueForOption(characterOption) ?? []; + int? duration = context.ParseResult.GetValueForOption(durationOption); + int? width = context.ParseResult.GetValueForOption(widthOption); + int? height = context.ParseResult.GetValueForOption(heightOption); + string format = context.ParseResult.GetValueForOption(formatOption)!; - // --- API key --- - string? apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY"); - if (string.IsNullOrEmpty(apiKey)) + using var generator = CreateGenerator(model); + + List? originalMedia = await LoadInputFilesAsync(inputPaths); + if (originalMedia is null && inputPaths.Length > 0) { - Console.Error.WriteLine("Error: Set the OPENAI_API_KEY environment variable."); context.ExitCode = 1; return; } - Console.WriteLine($"Prompt: {prompt}"); - Console.WriteLine($"Model: {model}"); - Console.WriteLine($"Output: {outputPath}"); - if (inputPaths.Length > 0) - { - Console.WriteLine($"Inputs: {string.Join(", ", inputPaths)}"); - } + var options = BuildOptions(duration, width, height, format, characterIds); + var request = new VideoGenerationRequest(prompt, originalMedia); + + var operation = await generator.GenerateAsync(request, options); + await CompleteAndSaveAsync(operation, options, outputPath); +}); + +// ── upload-character ─────────────────────────────────────────────────────── +var charNameArg = new Argument("name", "Name for the character (mention this name verbatim in prompts)."); +var charInputOption = new Option("--input", "Video file (.mp4) to upload as the character source.") { IsRequired = true }; - if (editVideoId is not null) 
+var uploadCharCommand = new Command("upload-character", "Upload a video clip as a reusable character asset.") +{ + charNameArg, charInputOption, modelOption, +}; + +uploadCharCommand.SetHandler(async (context) => +{ + string name = context.ParseResult.GetValueForArgument(charNameArg); + string inputPath = context.ParseResult.GetValueForOption(charInputOption)!; + string model = context.ParseResult.GetValueForOption(modelOption)!; + + if (!File.Exists(inputPath)) { - Console.WriteLine($"Edit: {editVideoId}"); + Console.Error.WriteLine($"Error: File not found: {inputPath}"); + context.ExitCode = 1; + return; } - if (extendVideoId is not null) + using var generator = CreateGenerator(model); + DataContent videoContent = await DataContent.LoadFromAsync(inputPath); + + Console.WriteLine($"Uploading character '{name}' from {inputPath} ({videoContent.Data.Length} bytes)..."); + string characterId = await generator.UploadVideoCharacterAsync(name, videoContent); + + Console.WriteLine($"CHARACTER_ID: {characterId}"); +}); + +// ── edit ─────────────────────────────────────────────────────────────────── +var editPromptArg = new Argument("prompt", "Prompt describing the edit to apply."); +var editVideoOption = new Option("--video", "Video ID of the generation to edit.") { IsRequired = true }; + +var editCommand = new Command("edit", "Edit an existing video by ID.") +{ + editPromptArg, editVideoOption, modelOption, outputOption, formatOption, +}; + +editCommand.SetHandler(async (context) => +{ + string prompt = context.ParseResult.GetValueForArgument(editPromptArg); + string videoId = context.ParseResult.GetValueForOption(editVideoOption)!; + string model = context.ParseResult.GetValueForOption(modelOption)!; + string? 
outputPath = context.ParseResult.GetValueForOption(outputOption); + string format = context.ParseResult.GetValueForOption(formatOption)!; + + using var generator = CreateGenerator(model); + + var options = BuildOptions(duration: null, width: null, height: null, format, characterIds: []); + var request = new VideoGenerationRequest(prompt) { - Console.WriteLine($"Extend: {extendVideoId}"); - } + OperationKind = VideoOperationKind.Edit, + SourceVideoId = videoId, + }; - if (characterIds.Length > 0) + var operation = await generator.GenerateAsync(request, options); + await CompleteAndSaveAsync(operation, options, outputPath); +}); + +// ── extend ───────────────────────────────────────────────────────────────── +var extendPromptArg = new Argument("prompt", "Prompt describing how the scene should continue."); +var extendVideoOption = new Option("--video", "Video ID of the completed video to extend.") { IsRequired = true }; + +var extendCommand = new Command("extend", "Extend a completed video by ID.") +{ + extendPromptArg, extendVideoOption, modelOption, outputOption, durationOption, formatOption, +}; + +extendCommand.SetHandler(async (context) => +{ + string prompt = context.ParseResult.GetValueForArgument(extendPromptArg); + string videoId = context.ParseResult.GetValueForOption(extendVideoOption)!; + string model = context.ParseResult.GetValueForOption(modelOption)!; + string? outputPath = context.ParseResult.GetValueForOption(outputOption); + int? 
duration = context.ParseResult.GetValueForOption(durationOption); + string format = context.ParseResult.GetValueForOption(formatOption)!; + + using var generator = CreateGenerator(model); + + var options = BuildOptions(duration, width: null, height: null, format, characterIds: []); + var request = new VideoGenerationRequest(prompt) { - Console.WriteLine($"Characters: {string.Join(", ", characterIds)}"); - } + OperationKind = VideoOperationKind.Extend, + SourceVideoId = videoId, + }; + + var operation = await generator.GenerateAsync(request, options); + await CompleteAndSaveAsync(operation, options, outputPath); +}); + +// ── Root command ─────────────────────────────────────────────────────────── +var rootCommand = new RootCommand("Video Generation POC — Microsoft.Extensions.AI CLI for video generation, editing, extending, and character management.") +{ + generateCommand, + uploadCharCommand, + editCommand, + extendCommand, +}; - Console.WriteLine(); +return await rootCommand.InvokeAsync(args); - // --- Create the video generator with middleware pipeline --- - using var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole().SetMinimumLevel(LogLevel.Debug)); +// ═══════════════════════════════════════════════════════════════════════════ +// Helpers +// ═══════════════════════════════════════════════════════════════════════════ +static IVideoGenerator CreateGenerator(string model) +{ + string? 
apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY"); + if (string.IsNullOrEmpty(apiKey)) + { + Console.Error.WriteLine("Error: Set the OPENAI_API_KEY environment variable."); + Environment.Exit(1); + } + var loggerFactory = LoggerFactory.Create(b => b.AddConsole().SetMinimumLevel(LogLevel.Debug)); var openAIClient = new OpenAIClient(apiKey); - using IVideoGenerator generator = openAIClient + + return openAIClient .GetVideoClient() .AsIVideoGenerator(model) .AsBuilder() .UseLogging(loggerFactory) .UseOpenTelemetry(loggerFactory) - .ConfigureOptions(options => - { - options.Count ??= 1; - options.Duration ??= TimeSpan.FromSeconds(12); - options.VideoSize ??= new Size(1280, 720); - }) .Build(); +} - // --- Show metadata --- - var metadata = generator.GetService(); - if (metadata is not null) +static VideoGenerationOptions BuildOptions(int? duration, int? width, int? height, string format, string[] characterIds) +{ + var options = new VideoGenerationOptions { - Console.WriteLine($"Provider: {metadata.ProviderName}"); - Console.WriteLine($"Endpoint: {metadata.ProviderUri}"); - Console.WriteLine($"Default Model: {metadata.DefaultModelId}"); - Console.WriteLine(); - } + ResponseFormat = string.Equals(format, "uri", StringComparison.OrdinalIgnoreCase) + ? VideoGenerationResponseFormat.Uri + : VideoGenerationResponseFormat.Data, + }; - // --- Build request --- - List? 
originalMedia = null; - if (inputPaths.Length > 0) + if (duration.HasValue) { - originalMedia = []; - foreach (string inputPath in inputPaths) - { - if (!File.Exists(inputPath)) - { - Console.Error.WriteLine($"Error: Input file not found: {inputPath}"); - context.ExitCode = 1; - return; - } - - DataContent loaded = await DataContent.LoadFromAsync(inputPath); - originalMedia.Add(loaded); - Console.WriteLine($" Loaded input: {inputPath} ({loaded.MediaType}, {loaded.Data.Length} bytes)"); - } - - Console.WriteLine(); + options.Duration = TimeSpan.FromSeconds(duration.Value); } - // --- Generate video --- - string mode = - extendVideoId is not null ? "Extending" : - editVideoId is not null ? "Editing (by video ID)" : - originalMedia?.Exists(c => c is DataContent dc && dc.HasTopLevelMediaType("video")) == true ? "Editing (uploaded video)" : - originalMedia is not null ? "Generating (image-to-video)" : - "Generating (text-to-video)"; - Console.WriteLine($"{mode}..."); - var stopwatch = System.Diagnostics.Stopwatch.StartNew(); - - var generateOptions = new VideoGenerationOptions + if (width.HasValue && height.HasValue) { - ResponseFormat = VideoGenerationResponseFormat.Data, - }; + options.VideoSize = new Size(width.Value, height.Value); + } if (characterIds.Length > 0) { @@ -152,58 +220,84 @@ chars.Add(new JsonObject { ["id"] = charId }); } - generateOptions.AdditionalProperties ??= []; - generateOptions.AdditionalProperties["characters"] = chars; + options.AdditionalProperties = new() { ["characters"] = chars }; } - var request = new VideoGenerationRequest(prompt, originalMedia); + return options; +} - if (editVideoId is not null) +static async Task?> LoadInputFilesAsync(string[] inputPaths) +{ + if (inputPaths.Length == 0) { - request.OperationKind = VideoOperationKind.Edit; - request.SourceVideoId = editVideoId; + return null; } - else if (extendVideoId is not null) + + var media = new List(); + foreach (string path in inputPaths) { - request.OperationKind = 
VideoOperationKind.Extend; - request.SourceVideoId = extendVideoId; + if (!File.Exists(path)) + { + Console.Error.WriteLine($"Error: Input file not found: {path}"); + return null; + } + + DataContent loaded = await DataContent.LoadFromAsync(path); + media.Add(loaded); + Console.WriteLine($" Loaded: {path} ({loaded.MediaType}, {loaded.Data.Length} bytes)"); } - var operation = await generator.GenerateAsync(request, generateOptions); + return media; +} - Console.WriteLine($" Operation ID: {operation.OperationId}"); - Console.WriteLine($" Initial status: {operation.Status}"); +static async Task CompleteAndSaveAsync(VideoGenerationOperation operation, VideoGenerationOptions options, string? outputPath) +{ + Console.WriteLine($"OPERATION_ID: {operation.OperationId}"); + Console.WriteLine($" Status: {operation.Status}"); + var sw = System.Diagnostics.Stopwatch.StartNew(); await operation.WaitForCompletionAsync( new Progress(p => - Console.WriteLine($" Status: {p.Status}{(p.PercentComplete.HasValue ? $" ({p.PercentComplete}%)" : string.Empty)}"))); + Console.WriteLine($" Progress: {p.Status}{(p.PercentComplete.HasValue ? 
$" ({p.PercentComplete}%)" : string.Empty)}"))); - stopwatch.Stop(); - Console.WriteLine($"Completed in {stopwatch.Elapsed.TotalSeconds:F1}s"); - Console.WriteLine(); + sw.Stop(); + Console.WriteLine($" Completed in {sw.Elapsed.TotalSeconds:F1}s"); - // --- Download and process contents --- if (operation.Usage is { } usage) { - Console.WriteLine($"Token Usage: input={usage.InputTokenCount}, output={usage.OutputTokenCount}, total={usage.TotalTokenCount}"); + Console.WriteLine($" Tokens: input={usage.InputTokenCount}, output={usage.OutputTokenCount}, total={usage.TotalTokenCount}"); } - var contents = await operation.GetContentsAsync(generateOptions); - Console.WriteLine($"Generated {contents.Count} content item(s):"); + var contents = await operation.GetContentsAsync(options); + Console.WriteLine($" {contents.Count} content item(s)"); + for (int i = 0; i < contents.Count; i++) { - var content = contents[i]; - switch (content) + switch (contents[i]) { case DataContent dc: - string filePath = contents.Count == 1 - ? outputPath - : Path.Combine( - Path.GetDirectoryName(outputPath) ?? ".", - $"{Path.GetFileNameWithoutExtension(outputPath)}_{i}{Path.GetExtension(outputPath)}"); - - await dc.SaveToAsync(filePath); - Console.WriteLine($" [{i}] Saved {dc.Data.Length} bytes ({dc.MediaType}) -> {filePath}"); + if (outputPath is not null) + { + string filePath = contents.Count == 1 + ? outputPath + : Path.Combine( + Path.GetDirectoryName(outputPath) ?? ".", + $"{Path.GetFileNameWithoutExtension(outputPath)}_{i}{Path.GetExtension(outputPath)}"); + + string? 
dir = Path.GetDirectoryName(filePath); + if (dir is not null) + { + Directory.CreateDirectory(dir); + } + + await dc.SaveToAsync(filePath); + Console.WriteLine($" [{i}] Saved: {filePath} ({dc.Data.Length} bytes, {dc.MediaType})"); + } + else + { + Console.WriteLine($" [{i}] DataContent: {dc.Data.Length} bytes ({dc.MediaType})"); + } + break; case UriContent uc: @@ -211,13 +305,8 @@ await operation.WaitForCompletionAsync( break; default: - Console.WriteLine($" [{i}] {content.GetType().Name}: {content}"); + Console.WriteLine($" [{i}] {contents[i].GetType().Name}"); break; } } - - Console.WriteLine(); - Console.WriteLine("Done!"); -}); - -return await rootCommand.InvokeAsync(args); +} diff --git a/samples/VideoGenerationPOC/demo-dotbot.ps1 b/samples/VideoGenerationPOC/demo-dotbot.ps1 new file mode 100644 index 00000000000..989462c6f21 --- /dev/null +++ b/samples/VideoGenerationPOC/demo-dotbot.ps1 @@ -0,0 +1,244 @@ +#!/usr/bin/env pwsh +# Video Generation POC — end-to-end DotNetBot scenario +# +# Prerequisites: +# - OPENAI_API_KEY environment variable set +# - Reference image at $ReferenceImage (or pass -ReferenceImage path) +# +# This script demonstrates: +# 1. Image-to-video generation from a reference image +# 2. Character upload from the generated clip +# 3. Two character-consistent generations (surfing + groceries) +# 4. Editing the surfing video (sunset palette shift) +# 5. Extending the grocery video (fruit juggling) +# +# Resume: The script saves progress to a state file in the output directory. +# If a step already completed (output file + ID exist), it is skipped. +# Pass -Reset to start fresh. 
+ +param( + [string]$ReferenceImage = "my-dotnet-bot-mod.png", + [string]$OutputDir = "..\..\artifacts\demo-output", + [string]$Model = "sora-2", + [switch]$Reset +) + +Set-StrictMode -Version Latest +$ErrorActionPreference = "Stop" + +if (-not (Test-Path $ReferenceImage)) { + Write-Error "Reference image not found: $ReferenceImage" + exit 1 +} + +New-Item -ItemType Directory -Path $OutputDir -Force | Out-Null + +# ── State management ──────────────────────────────────────────────────── +$stateFile = Join-Path $OutputDir "demo-state.json" + +if ($Reset -and (Test-Path $stateFile)) { + Remove-Item $stateFile -Force + Write-Host "State file cleared." -ForegroundColor Yellow +} + +function Get-State { + if (Test-Path $stateFile) { + return Get-Content $stateFile -Raw | ConvertFrom-Json -AsHashtable + } + return @{} +} + +function Set-State { + param([string]$Key, [string]$Value) + $s = Get-State + $s[$Key] = $Value + $s | ConvertTo-Json | Set-Content $stateFile +} + +# ── Tool helpers ──────────────────────────────────────────────────────── +function Invoke-Tool { + param([string]$Label, [string[]]$Arguments) + Write-Host "" + Write-Host ("=" * 70) -ForegroundColor Cyan + Write-Host " $Label" -ForegroundColor Cyan + Write-Host ("=" * 70) -ForegroundColor Cyan + Write-Host "> dotnet run --project $PSScriptRoot -- $($Arguments -join ' ')" -ForegroundColor DarkGray + + $output = & dotnet run --project $PSScriptRoot -- @Arguments 2>&1 + $output | ForEach-Object { Write-Host " $_" } + if ($LASTEXITCODE -ne 0) { + Write-Error "Tool exited with code $LASTEXITCODE" + exit $LASTEXITCODE + } + return ($output | Out-String) +} + +function Extract-Id { + param([string]$Output, [string]$Prefix) + if ($Output -match "$Prefix\:\s*(\S+)") { + return $Matches[1] + } + Write-Error "Could not find $Prefix in tool output." 
+ exit 1 +} + +function Skip-OrRun { + param( + [string]$StateKey, + [string]$Label, + [string]$OutputFile, + [scriptblock]$Action + ) + $state = Get-State + if ($state.ContainsKey($StateKey) -and ((-not $OutputFile) -or (Test-Path $OutputFile))) { + Write-Host "" + Write-Host ("=" * 70) -ForegroundColor DarkGray + Write-Host " SKIP: $Label (already completed — $StateKey=$($state[$StateKey]))" -ForegroundColor DarkGray + Write-Host ("=" * 70) -ForegroundColor DarkGray + return $state[$StateKey] + } + $id = & $Action + Set-State $StateKey $id + return $id +} + +# ───────────────────────────────────────────────────────────────────────── +# Step 1: Generate a 4-second intro clip from the reference image +# ───────────────────────────────────────────────────────────────────────── +$introPath = Join-Path $OutputDir "01_DotNetBot_intro.mp4" +$introId = Skip-OrRun "introId" "Step 1: Image-to-video — DotNetBot intro (4s)" $introPath { + $out = Invoke-Tool "Step 1: Image-to-video — DotNetBot intro (4s)" @( + "generate", + "A smooth 360-degree tracking shot around a cute spherical robot called DotNetBot. He has an spherical shape with antenna on his head that remains stationary, two arms and legs, and a belt with buckle that reads '.NET'. The camera orbits to show every side, then DotNetBot lifts his right arm to give the shaka hand sign extending thumb and pinky finger.", + "--input", $ReferenceImage, + "--model", $Model, + "--duration", "4", + "--output", $introPath + ) + $id = Extract-Id $out "OPERATION_ID" + Write-Host " >> Intro video ID: $id" -ForegroundColor Green + return $id +} + +# ───────────────────────────────────────────────────────────────────────── +# Step 1b: Trim the intro clip to ≤4.0 s for character upload +# OpenAI requires character reference videos to be between 2–4 seconds, +# but generated clips may slightly overshoot. Use ffmpeg to hard-trim. 
+# ───────────────────────────────────────────────────────────────────────── +$trimmedPath = Join-Path $OutputDir "01b_DotNetBot_intro_trimmed.mp4" +if ((Test-Path $introPath) -and -not (Test-Path $trimmedPath)) { + Write-Host "" + Write-Host ("=" * 70) -ForegroundColor Cyan + Write-Host " Step 1b: Trimming intro clip to 3.9 s with ffmpeg (re-encode)" -ForegroundColor Cyan + Write-Host ("=" * 70) -ForegroundColor Cyan + & ffmpeg -y -i $introPath -t 3.9 $trimmedPath 2>&1 | ForEach-Object { Write-Host " $_" } + if ($LASTEXITCODE -ne 0) { + Write-Error "ffmpeg trim failed (exit code $LASTEXITCODE). Is ffmpeg installed?" + exit 1 + } + Write-Host " >> Trimmed clip: $trimmedPath" -ForegroundColor Green +} elseif (Test-Path $trimmedPath) { + Write-Host "" + Write-Host ("=" * 70) -ForegroundColor DarkGray + Write-Host " SKIP: Step 1b — trimmed clip already exists" -ForegroundColor DarkGray + Write-Host ("=" * 70) -ForegroundColor DarkGray +} + +# ───────────────────────────────────────────────────────────────────────── +# Step 2: Upload the trimmed intro clip as character "DotNetBot" +# ───────────────────────────────────────────────────────────────────────── +$charId = Skip-OrRun "charId" "Step 2: Upload character 'DotNetBot'" "" { + $out = Invoke-Tool "Step 2: Upload character 'DotNetBot'" @( + "upload-character", "DotNetBot", + "--input", $trimmedPath, + "--model", $Model + ) + $id = Extract-Id $out "CHARACTER_ID" + Write-Host " >> Character ID: $id" -ForegroundColor Green + return $id +} + +# ───────────────────────────────────────────────────────────────────────── +# Step 3: Generate DotNetBot surfing (with character) +# ───────────────────────────────────────────────────────────────────────── +$surfPath = Join-Path $OutputDir "03_DotNetBot_surfing.mp4" +$surfId = Skip-OrRun "surfId" "Step 3: DotNetBot goes surfing" $surfPath { + $out = Invoke-Tool "Step 3: DotNetBot goes surfing" @( + "generate", + "A cinematic wide shot of DotNetBot surfing a massive turquoise wave 
at golden hour. Water sprays around him as he crouches on the board, sun flare behind.", + "--character", $charId, + "--model", $Model, + "--duration", "8", + "--output", $surfPath + ) + $id = Extract-Id $out "OPERATION_ID" + Write-Host " >> Surfing video ID: $id" -ForegroundColor Green + return $id +} + +# ───────────────────────────────────────────────────────────────────────── +# Step 4: Generate DotNetBot buying groceries (with character) +# ───────────────────────────────────────────────────────────────────────── +$groceryPath = Join-Path $OutputDir "04_DotNetBot_groceries.mp4" +$groceryId = Skip-OrRun "groceryId" "Step 4: DotNetBot buys groceries" $groceryPath { + $out = Invoke-Tool "Step 4: DotNetBot buys groceries" @( + "generate", + "A medium shot of DotNetBot rolling through a colorful outdoor market, picking up oranges and tomatoes with his small arms and placing them in a basket.", + "--character", $charId, + "--model", $Model, + "--duration", "8", + "--output", $groceryPath + ) + $id = Extract-Id $out "OPERATION_ID" + Write-Host " >> Grocery video ID: $id" -ForegroundColor Green + return $id +} + +# ───────────────────────────────────────────────────────────────────────── +# Step 5: Edit the surfing video — shift to sunset tones +# ───────────────────────────────────────────────────────────────────────── +$editPath = Join-Path $OutputDir "05_DotNetBot_surfing_sunset.mp4" +$editId = Skip-OrRun "editId" "Step 5: Edit surfing video — warm sunset palette" $editPath { + $out = Invoke-Tool "Step 5: Edit surfing video — warm sunset palette" @( + "edit", + "Shift the entire color palette to warm sunset tones - deep oranges, soft pinks, and golden highlights. 
The water turns a deep amber and the sky glows.", + "--video", $surfId, + "--model", $Model, + "--output", $editPath + ) + $id = Extract-Id $out "OPERATION_ID" + Write-Host " >> Edit video ID: $id" -ForegroundColor Green + return $id +} + +# ───────────────────────────────────────────────────────────────────────── +# Step 6: Extend the grocery video — DotNetBot juggles fruit +# ───────────────────────────────────────────────────────────────────────── +$extendPath = Join-Path $OutputDir "06_DotNetBot_groceries_extended.mp4" +$extendId = Skip-OrRun "extendId" "Step 6: Extend grocery video — fruit juggling exit" $extendPath { + $out = Invoke-Tool "Step 6: Extend grocery video — fruit juggling exit" @( + "extend", + "Continue the scene as DotNetBot leaves the market stall juggling three oranges, rolling away happily while vendors cheer in the background.", + "--video", $groceryId, + "--model", $Model, + "--duration", "8", + "--output", $extendPath + ) + $id = Extract-Id $out "OPERATION_ID" + Write-Host " >> Extended video ID: $id" -ForegroundColor Green + return $id +} + +# ───────────────────────────────────────────────────────────────────────── +# Summary +# ───────────────────────────────────────────────────────────────────────── +Write-Host "" +Write-Host ("=" * 70) -ForegroundColor Cyan +Write-Host " All done! 
Output files:" -ForegroundColor Cyan +Write-Host ("=" * 70) -ForegroundColor Cyan +Get-ChildItem $OutputDir -Filter "*.mp4" | ForEach-Object { + Write-Host " $_" -ForegroundColor Green +} +Write-Host "" +Write-Host " State: $stateFile" -ForegroundColor DarkGray +Write-Host " (pass -Reset to start fresh)" -ForegroundColor DarkGray diff --git a/samples/VideoGenerationPOC/my-dotnet-bot-mod.png b/samples/VideoGenerationPOC/my-dotnet-bot-mod.png new file mode 100644 index 0000000000000000000000000000000000000000..6b47b49d6e695bc89b2331aa6291c14f69560dca GIT binary patch literal 124092 zcmce-hg(x!&^=01DK8yFQ3OH+elNDsIu?o8u*3GSw&Zch^RV=>fDa-Q3?;O$KFIlG@XS1#J%t` z2k?bh7vo1hk8~f%+QZ#M>>S`v97Par9^lhNMDj`q4?BBTM<0$Sj?OOb3Vb`wC_WAs z2L(Q}ySie!9%_y-7p*`q$A^J>#`b}(_A(B9N{ZB!@(5Y5f}5j{9S6egnY*_vLILt` zVOj8+@MlqoqP&-bldKU`<9|N^ep7(Je0)4)MMeGn{YCsGMBrY|qT(_#GNNL4MDN@Y z2A>f24siFeLkPQjbN%lEP)BcjFBcCV7q~kIVL`hma9Wy4tIs0 z*hx4Di;Fwh3EMeHhzmPNNH|E{6_>u_B>n{QznAxMar%F*@9zD-W&j2eCA=dlE+R(Q zGU2tXzL$$5*ahK|isJJB-v9snO_~nf{M;9o>OR{ekUn zr@f&fBD!GEhN>7NtT%B1Y22eB>?ff+2}sMQJ0I5FZzqIYQHkVeH;E=U$Ws6Q+Ch8h zr978K@8YYZ-u|C2^Atk+zc_7HCn+deuZ-Uc$nCY?7*A2x9x-UmGCGsXqLTty1|o6$o2b&Umtg_@X5i?x39iUPftJXOdtCyRTRdrWc;J5zf3R8YS^{L&{^o^GHYwubOWzEjwYH; zbzF@e{6iz`Ns@tSc=JXUxbkBBFQosDoSHcpA9sDH%3U2TS-smth1n1?`oY7dPNn+d zafPqd`%mv`_sF7YD_A&MnThMjgjqO-TL#{s2XvvI-}x|U3Gk%7P8JM*eYXV45wpjs zPA9MxU~sFLkL!{^eoUGUBr<#@p?^iVWDlA@63#d1#wMU2a4YS3yfN95%JWw&>P9~e z_@|R<$>$4JT%dHSR0V3}N?)>)sA!|k@+B%5AvaA zfuR>x7aJS2C{d-0}#*?38(hQsoWTIgAR9OcK zg+@!i_1_qkGHun{5F9t_g+yGB#x)I;x!7bU)0wHQnFx0k4g_krGQ%RSD_2k=dC1{c zl}QCZ5_`?X%d$%8sAE+1k%a480l$!I$=>}PJ4cyE# zk%Lt(0$+*`9+$wPnmw0r@4CUG zLLD7qb4?87y85e>P=5JFp70j~(xe5};T_CtrxtV#Wpu5Tf$Mis%ds7)(`A+Q%Xz`B zTi^47!sFC{{aJ6EbLc^#K3&uJXqu6Z7rJ3Zj0di7C^)tmkd-eAVTM{ero@?B@Pl%xd)xWF#ZxX>hYueky_&wvPj{rwSAI3b^gQK4xczTE*cHKXSCr8(xn18nFx*~ z%u5u9M80dEPHItL3B)>a$cvcxRFG`b>*NgSOcCi%3Ha1fYV~hxLKz|PlF=w$`Q03H ztc*Zc(`D;d<~djECYspE;F?t}xV^Qc**`VSz#r(viM60mNn&>dRLZIA#;6PD3g^L= 
z`G%Y?Q%;&{o0;0|)tIL}`?Y`V8ljrWMyy5~{3$=ET6C3Bg=rU=2Y&;IcW%?=($vYa zx6f`K1+7?P+bl*lISB{e^gm1UZii}&{_UCUoyeI9{!Kf>s6MZ2RDP^F7g}*pt!38} zwHM%M37M$oAEa-gBVp4o6fO?762Pf*wwiG)l^dzg8n;c5INi&|V|J;V<-m@0?}bKy zoF{8wK85>_BwNXhYNNr-=d9kf%wKKwIF6jJ?xh~3u=A!h6w1X1HB-ml$rnXL1Y4jsEh7Xwx+#pAF$5|(9Gw__(Eqdq#ZeOcm;x7Fdg!V!k2+uQ zxAC5F2fhlulJsvsZ0Ln$(QE~g$!QZM%xt-d4@=s~g;sQZ&;WJ&V!B@(vQVpYKXBj? zi&3Z{#bI&u@yo=n`dTIy8^lTGi|_$IDT4b`|MQv;FyS3sPkPMA$VkGSjw|^^JCRq} zU#j|cs7YSwUz|0B6tWpoV(x{nLZa~|oVx)9t))ITdG2Sp3N!Y$_x8uY;DxP=EG%Hf z&gmqZMw1weofy;ED=&VMdz^Iv#*X^X9Tp5NhoRjyq0m@m0bzN(Fm>>)T!fBV@Zt4^LgMHlgcTcwzfzGac5i$#Eq;cI8937q1j^{=MU(-Ee)Ec zCB>elQ!4RuW^6eTGL%3``VMNDBKXoEURWPtb21XEAKL4#0pj!b8zOE<;rINyhJB$G z_@A7cjb;kSwX0A3|6zmz--bDyld5&+TibaOG;Ey{03rdQxoCRngF++QkF9E z^UfsF`B6i1LM_mOpE7OjZ^dGC*_x@rT}VG6L?umEj!me&di}1IswVrW;XyB_;~%gS zN#V@4SS7zs&)dU4ONCnp))u8~WRh~9FnFZ6l)c8B+oWSc6);OI0W$D9akhf%P{Qz9Xcb>~_VjZkj5Xw7!g1=0w0*2ZsVIDYA8z%fQ?fwP_~*WB9!tlW z)Q77f%Z@Wl1^c<#A%!B8GrM~VD>#^yaJ^h||_j%0v!z;B83 zDS|;MWOA6?;ZSp{LaakHT^!t9pz}lqAm3M1F zCuW&LIkTJo zVpM&)FNw<-Pg@ZrMPhv@j0TQ{2Y{th)NL1)y z=nA)OjBppR0heqbvRcRNHtnRnESnhRS0g9Wqat&^M&`3a-QI66YQp6snilb|YSm(F z!cN_u9dR|sCtlDe25yNtdUs;RQYoX?B;7&s#H6jVI_~GCmF?impRNB;vrz9C2 zKLn(~@8k9&pZl#~#a6+MR=PaI+LoH!uOmdw^PM*AHPag^SLH^x=e^-pJ>6>Z(6>Fw zMsD+0!t&L(!a^>%_}J^q0W&yyx{_B6&iw;{>3nHgL9yE%qlN{~IbrkTAa-besEyuU zOf0!BCtus&7I3}aMrMlZHp^2*rx02TyXZq}m>DB8j1$EhvfJ;JwP%(WJU7uPmzV=6 z2PGjU$+*jLfw&p{I5F&w?+}XBt9^VU4oMR(dMR}I$dzt$ervVDlqD~CsfCS~HO$DG z-ss|C75ZEWb=a0Zy7iO*0AsjP0SF`$q4d7)_}8NIOo{4Y2AVc3a^;At(9^5sac9!B z&l7$2M_d)V@LYwcP8sqyFm~uF2j(Koy5fjdFAUS=kH`f!oJeR5&IRE}rSm?I#N$QU z-p;u1U%z9C{-L4MlATe=`Hg{fV%Yc2;5gW(rPOG9Z>3FLr`*v_ZWv>Z*0^2acL>ys ze5W2-bGqN(20xW#jDSLG8JB*hfV7os5lt61N@FzszSqWeW_NW%X;e-LpEfcNSDnW_ zxS&Tj7zKvy-9TUbr{>d8Tj|Pt(J*0tww#mHb`;l)IzMXO;sr5b(g1SKDtQC$l|(t+ zd_Q>t>T-qt7&z%g*!$(h#d_70rFcpX^c z-M@P*6!-77ud*&u+g(Yg#7AC{aRs55AVEA=3U6lh{<_T~i25|r!R0IhZcCsLGCz8e z$`^$?tf0+1PqexZj*XCo-~Carkq`PjyM?RcH^wLMiNnDny7B&1eTz80t6La@w9BDq z!~2Mu;kt6u?xs% 
z)wcHekT4(mz`u321O@| zqXUDArPFQBMZ!{YU?Uh!xHOFo4Ib@V!Lp!)3Z}8L#$_Z53g>BF9>Q%HQROZsA>WOBccPy4e|o7#Et?jzttU8mX6V257%G;m zl3HtKwf$_R#&r*_ zaN1kxM#wvoIh#t|;D^(8==qP&>u%@1CD^dX%I;x%LLgx2lXI_JADSC}N_R(rY*Obe z?k{4KE#xQCj6#r^bN!I+Fld0je^&Y_JtlJHRO#$oQ%}Kt6dcjPI6BV>Ae^*6{)a@} zYFCFo$IFZB#gFQMWsw@Xs{@@6;aw0rO^6;|p9Q6bdspV?5%hm1`@aAGuPhb-W` zM_)B>p)V{Je$AQ+p0ms6I}zY5A=9khwdMuQ7d%8gAvo*b8ax?@9?r(a>iS!W*w(Nm zmylR1z;z{`4(`uMWhLk4h3S@=pGv`oXWQY|NNYmGHTmDsAD)p<3iIc3c}Ny&agkp{ z)Hhx1m+myij+;UX1@2(_CBW`x?s~DP7X) z2S*BFy}K4b?&QSzxqD?PdghA|{$`v~=?_Y?NYPRu#t7|90|6f?sr&#f42*wRV;aBT zA9m6dM#XvgoB(xGyZ8C%(ZtOg-tbnX^|~;9@r1B{-JJ6HuEb?JEaGPI zgOT!)8I|@=my+>HY2|FUAH5=gHB?BKsi||;F6gv}@Ii@~FVMrI^?$pQLNNO$U3_oI zxFMMaF-k??24s~^g(iZQKaAq9Fvo<^W|m(7vfzPGAwtP8@{N3klXi1w$(=esdxW#p zy?JA@%zVq4?dqmEMcG}fMHUhpQj>D@DtQlY{vikJZG%mIHc7C`h2I_vu!`SWpQcTt zx!Oe#bUkQG7I>wHsyc@g8^2FTSdCa?2e-5J7{bk6%R*@d z|0_s~Tt8e$%NSgGFecKqa-Q`fHI<)tZ_};$sqGCR>jm<3W5t$*4=6V?lD+J3uF{f| z6pV9Qp0+ZmTkibIvj?DEBF7$r6OC!bum|R9iBc4s>`C3I5~HWzLN~V+I5oS%&xN4ts4pA9WxP%kcv)ZE_d98K zsh~d$G%$=V8;wbXlzqaU``5jFOg^jc?6iLXkwJ+4RJWs(?^bNvs+ZaAIS)EDpR-Uup-|)bBTXB8*=o5> z)KDx+9uy5n)0^MR{SSy`*AEA#AF(&c!XK^0sJ8^N4eoPBnWx-*$r$U%YN({>AYE0d zN7UoL>?c!g)+YPuZdEmViGmKPtV>4a9An9)VCL_w+{q@56Mx026J~m3nfk9to0Hg7 z9KG&Fxaay_?8mVot>9+{?q=kA(C1-Q`0fIcFTN^mM9;hW$4*Z#g%3~)~OADm}iOUOr*~27gp?Q zWb*d?anzitcPnESCfiej;|BV6=?tg3{P%0TNzdy5w5ywg8^*#K_y79SGR)4Q!)@b&$&T#LiM^>gtC-Yflb{JU|AGB3u!b}7n^bHgG;ddWTTyOA_ z<_eFA}eZrR^AOFS$(R2tAxupLEvJT(G!wRMNv3yD4@<&_DAVC*lR zkH@Ob@krRS0Fd3NUeHzkHS_xB;k6xQvC!1`+{)zB4ApL)jbf~x zBv$kXV5jPriCJItg0QY6Au8inUHQRN=244}>a3D_xsUJGZaF7KH5xuRkzouUpFR>I z+Q)iXT~wLaXwC7IsNNc|T#_mi!H?z;Js1pQC2wYn&hax>;kpVqt?~tWMac`dL%>v@ zPc#?B+yGdXxsqTuaWi~x&L^|7ma`^#@kF)-%ZnkP+7Y8iKYEB$G-naz*lbSnOUsR2 z-V+S=_MIXB#_>r_`_?b5maT#9Aorpt-G678@5EfnT|6y7?{bqI42#a3F4Zv;yqxAL zp(Z9J2)$n|_UiD(CI_0EvJ-FpZ}*QE=m34n@c4u;9?ARCbk(v6x&8WVfkTyCa+zg7 z{)-84R1MEXtuLN|btz!akW!eM5DG z|0zz7R#}C+7y7P|6n=u65finTQ~yq1+-<}jiC^bY=zq)o0)My0W#YPUwV;1X>8YeL 
zCpu#)g_qSso;I<|&Mxd`(6;*8O?*A8>$6x9P>6aJbl){wA|tmbn0=wE;IUa+-E9`48drwOz+6~u+B&W% zh`q7Jp%+k7TQShI`0LSY?;Ltn5IdPXL(_K2Ful!awVZf%<~AtRzLiTdprik=;(q(q zir7GnJ$Gr>&S1AJH1@~t6r<|XT~ukXu)K4b@{@AYUsN;*)R%Iy{M4xE{emrS+cq~| z;(eJd6{PN6_3V^SY3J+@VpV^-PI@h}kJ*9Z=30~@4;?+5wtwiw<`p^( zJj1mU-FUy%-=?E9CQoDO?_sw^0mP4DvfYO)Wp;`>H<(LN9aB`YZ8Jm%iE-8kRKkp*Huzs`NqHfq z1pA{GGc14-FNZq7z8BlXvDVJyEmrgI55*)Dpvn}yT^Q$SO#cITqfkNr#R6s}9c$g} zINB2(J`1z;JCt63abHf>*o2(khNu`~c#l8YA;PNqLxYK4M*0=jAr#%83y{N5=5rN&Em3lthBcS}xbc5*Q9pth-}vZytv zX1F_bWwg94)6sf*buNoViV$zc13CruZO=!#sgT(17tut@1F$DfJi27=VK9%SdDLX< zn$!1t3St8HJg(*7B&1F0e>9qsO-#o9or+?vd%KCTedu-9R`w=@QpscGxaIUA0f01K zgqtdGXoKT%liIL0zfiWku#Gzt1)t051QSX{-4-6gEu#fPhyj-u5qv;H zW!+4|Y?8MxsYg~>SY0cBvR*fNbC4BPZrW0B8ZGfS#e;}z-VBz(hHcIXR=1t$yz5z= z(`c34B{+%|Cq^LT^;h|^HiVwmKl+dpk3vGBnm<9oe`@pIdS!O^#@DZ9K@6jPBc5B& zNZaV7fozxl``#DquuUY86<*U}vLwp-f2%!9{q_Q@aBxqG{xE*U%;JW%2BjMK>9 z$=#fyHV?C@%8&g|MlM*-c7nMf+I=O*G{fIf+_ro~33A@epRoNOcIwTM*w&U@8tXZH z`U<=JVbUfbya~_ucka-m4ZV8pS9S()2~`xJ69(9RVHBhoDkcG7z-bbiPpJlQbJc$$ zd<65<_oU82#iRf(KF{}Y>j#<5$h#Idv&W1~lu_*JhPjnBsewxka;IixHxa&bq0Lr0 zg@eS5*at(~vwI7@?klvB$Wsx9>n}S{R_;|W1!ipk&r1qzHVNpq zB}#5f!gfHMlJb<@+=&PGiR4&be`a(jdE{;|cw2<^y_8v&&%@porU$)$mP{YZwV5LV3?H?JNwMN8A3;7%%lQDiXV#o9~Q-I7yD)^Eib{ke+(yv+>X9^=rmZFjk?d& zzb~~qFmvAL{%~sB-y9hzPj)gOfYVTTI(7PgsOrT6r7}eJ^LFUwl>hrseE*~;u&HN$ zx~jiDbp_SRfU6s|B|SM2p{CEo+|VUg^02O2DTpF@tnMTpzmD5jpD~V&&>v^G_&9^z%w=H*OL9 zL|d_M60yV_+N1I1(SY@#GkhgxK@9T!Gj)BK`*7UXy&rAG2E%=N46OdRk*mgI`ZoqR z(K;wZo&Ta$lHzSu0%{%6nh`OeUorW};u%xJKnbcn=uPGh^25vsZ`#DgkG0(o?U&C3 zNe7QPIc|OzSCh$jU8dDC-svE;QEMeZR;2&_8s2%-F@}5g(NnYA4wIp+iEq9Yy*r{Ixaw_U*548+hHiL3)nB}16itWW*zF@*8U{A;+@@1j`BWWi8 zCT4t1Rh{1UBc)FTMxt-e%>2;#>L0&}3opafRNnbb=14!U%4<3nuTuj@G)%1$-);?d zmbK6A z2PPk_bgR-tz7|!&qc_}R=$5P3z)|b_VO+wIu+MAvad@eJepNAO^2~)nHa+PW9K`=Szm28XtAA{EmYW3SPMQzyLR59J|p>y8Q>3khIe)Ckm zTc;@E6$0u-)5Y*V7$%>zy?nVg-rFFw;yLiSA(kZOzUx|C97DSPp}Q4V{+WUnXx*z? 
zh~6T&<>-z&J_kT0B=f4ddJOkt^FdJPPbSdPZXd_W%3u4=*#VSNhkQ)Svo&i^a?Ku* ztQN~9oxhEfB_EHzJLNu-Tr{2Z3Hwtx5|vtDUWBTP8;RLpt^4wMMGz(_R`WT2;(Vdc z)Oav))%sg!cpOqAI;<7Bs%6kViz&_}xIgrdst_~Z$D5~h(kkx?u)FhBcJ)~mUN>I^nLhI3h1DfA{Okt(Nus1OPsIE<_XuTR$wC z!KZ##0*r`~a;5&hmrdo($5#hgKg=eXT{>zS4DY+s_&f$8Oy#Gox?ogH1Q_a&P z*MvFNQqD%Ot9$d)>`4cEK|TgBphxo@DCoNfyZMPjDiu8TS;Zdd|P0` zVryFOBI71Kx~uUl?*8a;)yEJ`ov#x9p!9u3C+VEBKg4W?qiFXWdusRA){^P_2j%|B z!_#eJCZ5>I5AsSGeRkYymH`SD0;gtUwJBkbt8bbaB12t;l6F#(84hFAPY%zuOe*d+ z-PjxFl@drqkwgRz#~LKj)ADT>WA5*$X|WB&TsXj^)ickWz}c*GprIm$g3y z^JM3E{_)eG9@u4$E^=qFtEXaqu4W_6&C`l-%x4+m2xd<%luoywNDDBTo94#bR-Q{d zIrA;6G^NZD%0NGE&d$*^B#-FnfMotGPrjw(x2axy_NG)&{$jQ2l%o)awb#8Z6SfbzqvW?Hr!yfl_v4at4|(FWPKUHG6V&z>m|<=|2il4Az#(jqDS{ZHWOpZ=(9ECfXN$XsgWQ1*6Y{<`_BK$yBe zmE4%=;uC>zwi3Dy??z1T;~!g}W+0J?*?(I?ziZ@@{k*}%2z&giul((GNo7sJ#(0k_ zhN$|Ya&oT6JQ)@LJxGaArCbSY-4l67?25X7=GwccXy?Dea&hfBxb2@A1JhIAo~V)! zty~3#QHM9_c1j{v$+j1O1pH>0TO5*imO5!wgRv^?{{Ib!htbIXOK)=as}Lr()XI-m z$bTXKdQ5kml5V9^u-?tBFWtXH!YXJqAgBPhtrnSG{9urcW&D)nkthWu5H~mG-fP5F zGs^a!?>+81RxrtKn9(Kh6U(%AVW&DyhxynMwqY4=4D@Vv%{U=lsN~4|&iMxC-faZ%}U(GIghij~g`&4oCcHYPp}OD)jO4gLKui>N{dG{x(din%YK& zz1m8|;&aKf*_3Ci^^-AUWdSPGzwLjPtklk~ zK|!<4mel5HHMXNbE06_nmAao4^VOVG=@ERvR8+b1EeAg(am&r4-K^=Ww>Es=kaNpA zsOpdv8ia%bQ&=n|EF(_R8cUIWZ?s@yd?E^)kpRzl^bTxfWjhPg?j0 zE3QHq)>`AuybUPp)x*Kc!^{4c|zA>V%^VOA*v*YF z!}qIs6P^&(L?X;ZexlM{iKOzSZGBt3DHE7K$U0H2vNr?Wf|O7ql(p#P3U1qizvs{A zDuvu-<;>@$uk*{(8vPg)Y%Fl0E-}vj?4-jPL32Jw5z1fgOZ3mP*po3?M< zbN6Zwtb;+9r=B!YpNSr=IJRVC2`LmWscYEsu2K}`nsNeHI(l8)mFL#CNLts^7NT{c zfr@*TwoW2tmc zXv<0KA}Rk2N6-@h`Pc)$5;Y6UuSWc$A$>qXqb;xlEf2uv1_k{+w2$lfoy7fJoIL7|i^#&Dm4A3~XaCqE;Jm3}R?)gqTZU~?$r}NZR3n~eLybPrq>lA+h)f^Je zO+v|`UZ&IIu(!$MP%`z=XC%DDaWpZ2@v!w4d|rZDye50F^F#LsbNbfvqJo0D=agsU zL2L(dY|+y+fVAz1T_;CMf4wJ_^mfVQ*TIAVOJ5pPnbB@eL+!8KIsN+-L7PaRP8Azb*=FR z>cZE}-Bg%JRbksFauhoH8al{GOu{}^=c%{1AM%0f3_O;w4~;YJJWw*V?%!w8JG?h) z9d&Rrqaj(`M0av!|7U{>)F#}@I!X0BJ>mZ9Cg#38;Fa?Mn2m&!A1I!sef&~ZR6ky7 
zOpNV$u>Js1ad_IT=!~iCBu9j~K$#en>8PM1G+0rExjUANw%LZ_E%yx^r`fTamGl+1 z-J9?AR70_+u~m3$InefztFK3;L8114EBis^TK?&H2Ox^c+1urtp)KOBv6|cUO~U8w z71ZFfSD;$g`G|8?dKoNF?>T8p~b{ODMt0U8Za4XbG@i)BIQ9e93yuPcho}J`c zy5P;d&acXJluGroY_E|iwfMt9FrtpNxW4A0fX_%3$Fuq^G2-y6{cRgXqo{pGRWUUk zXszaz-IYwxb(}gKHw!Zn!Yu-ZF{P(Ni=XVETo1C8skvcQvCA?lx*%+Q{mtqCIp>0! ztx&4t(s20uCkRm?(kuMjxcKYso3MF))iDUfL zUB4&BJ%p1ZM!EQIA?c&;is+g3`JQ1j_N%c$+RmW6DwZ2|DGuqkIx-tjAmYSYrBzb- zc?0=vYsKj*raJHs4f`9>1KAUs`VmlWvvG6Ijjku>D$Ql77|oZ7#{PKq;IoIA>m9MM zG&9q?KxFK2_g8+Aoy^d&KR0{>BhUWXG&YU&=rBCGRYW#JbbP*Ag8c+>ye?mv<(kiX zY2sDK3s+%zpx;cFwLrXP`AZbwe)9*Nd{SGWS26MS1AZE`3JfDpPc7b_bFt1uCLE=rxy4+R zZSZ41WKC^oQcUbxj(S!(#(v)PpZw> zH6tg8)4Kt<$`)qqEw%hPhPu4rZ0zN~dG-#;6EMX-J=^oR1Ee!6q?th1}=e%70GpXih0r z%0H}|x@U-p4h_hvzWF%#a4}i(k8Cr;Cz{UN+GCSb9y5f_w5%J9S!DywCvrUbVD2qT zlwamyJ-miR^#!BfOVF841OscC2y}XCIeiO3bemd^ZjV3M#1vqNvk!XgZIESJF}U0G zWrZ*#lq2OoYp*=+-7l@_6Fg?eTYdxZy8NSg51WY`_u|iauypt0=Rq!;nGNVfI(l>; zVLF_#|F~mPp`zDbh7nEnhH>$5CCu^HQ7kgv_!?xh77ecl4cY<2+EVX%6SNK(I2nMu zb+2&!*K6B7OJ9$s@+^>$CO(^Ra-QU#&(#Wk*re z1^ZI9J+u~HYl;Mto7@({(YBOyc&C>_rG(P#m`%%d^{>GM|0dU_572SCS&mnaHBu~# zr_n@;_+`iFA2^9|O0M9;^Cq?#8}p6% zwTvG+w&6tal~DJWrdxh2Vzg1HpHv^)>Y3DTwr*yw+H{n#j4-NMg2oV)Oh#H8Q&OXuuTBJ+=P7hfP@A1R1$lPNwm zDr8e1+f0()%`h+zJDX%d1+pk!>_)u>Jwu&;=dtxX!?$R6&()c@6*h|gOP5Ov3bzpU zLE5=f#ja$Q=L6=1)+XFFRY>bOH}U!2U|hA#!IAT5-(pH;OSwJZufzk0R<6UXD#hT3 zy_Gi4-8rT z%T7ejX^@qp77`&dXD2|^AFUu4%r7F;O^_8$7|Wz08{26>xz|S!c^HM)kFU1yFT{3; z^U2eO7R6x~7@W-XZJivM0cAW%&LvC#FBR(tLqG&Je;ilIHu%{Deu6w18 z4Rn5U=B0QbnXm$bpO}_i>BLu*#XCj!q+;KuFTSygGdsDIu)Lb$WA7!sQG61JdVxt6 zMHLVSSN3u>A(3U7dxiNG7KEX%Ei|$%2I!vuQ3RB-6?Jk&5dlI1s7ddyAD%-_E1V&C z{$R-3Pu5Jn`L zKD27|3rN_|OC+lN0YtG3bo9D1Ku&>u7QhTvU4bHQMUJ0jhe`;2*FmX0z`kwDx-pO$ zi{CaTo6Cb&*l&cJ&AU*`AD7UOv4HXkeNR%d9d85jKVe`R4gJ&Z>-X~L6&;X9B{R&# zngdA=8&9jXu_YbuSL~V{V_mf1<>Yux*J+xhq4NG?kF{>P<2=O^wQeBiedgy(Grn>j z_5rk(=?Gyr9z7>Rq^XQmdlaiVRGKa zBrl#h16gt+Lv;5C>g+GeWY$SSA54_e>*!E1_vG1)HS|O-iQ-8ut@saZu*Jdv5*_zv 
zd!m=<(YF$Iq~suloKRMIfrDJoQB)4GqmUSXifVV4^aB;BWg}Sy1a!-^Nl#D{Stl_g zmu1g_i_?wT>+`l3%9Cp8V!EsKty3D75_V-j0+FRhtN849w-byY5j%q}cwyXfcL3Fd z04KeqGeL&}44CXUZ&$RXt-esqQTlxYP88Lq6!n~cv+((F!hMAJ=FU&i(UGPO7Q8KE zzvB3J(~I%#puX5%OA&f{8EEeMdn^fll@T}|!L(>q*Sqx8ybA1xr|Q2dZ)t|u(GTW* zd?DuW9ThrTSr~Hglxd;L-LgYz+Pw1xdOLSxMOs2G7TsG%Wt9Y9lLve_nJT%<4VQm!U#g*SqpPkB9h(*hI#k zu+4E$5Y?X$E1J8`S)-ma4yygGHm{xi1A_}#2Fi70RULTlZqdofq%_=solj^6!KC~K z2+u_2?)@#tu0Ql|K{RhpfHtsX%N`#BQb^&o!QO`b%y*dJmrX?A(bQx$HOXF|`>U@W zo_&|W3np@wka7}}sm?9hxXxvU1`~x=m7`sRqC==FJ{1+`ZOM&loNSUdTX=$*fIr%l zyB#&OR$(mBbFoCgdFD9VfyKC)GNK+Av*))pM1!1B)-U=Xcu#cG58W30>N`q2pU9`K zr#>^p5+T$TQ^PqTs=4YbX)2tTa~ODLOh)+p9ziw@>eU|yOGfk6Z;lx* z4Q~*NieEnjBYXZ@4%sJ%x>DuT{S4~AjcyC{_LZFNl)n_bD!?A8xZ=Ud$f_0q~eSL0<8nXE2S)}jT49>}Zt;t^*1Y*0-?TolZ@kDEyHVii3c zLnX_JE`=ZG9?kF1g+d@CM8Y$l)dt)lWGB=vFl7RKJE$Yo5~TQA+IJgST=Z-T=2epgF5JQF z3Ed?p=b;i34?ROsBo*k%322rXHs@N=10s_4Y0B5?}k@cC_HQVqq$*9J0Ce$3i|j0rSdz^K6$M_|8q=! zOHdh9YUisDpp08z^iiQz#;sMI`@DO&U+}*CdvgBV3}Hlce$8OgX#N9AfIhlC?_~ON z3ujoiIcY|p@9wgz+p~(;5{KDOwY0VFdmd{AL^pO0PF=v0GddLWN{4;>NvIV@c09s? 
zFhtyk;gX=&t!-As4N~iTTOl6WYN{q!ZC{J$f+)Yf?z+ut)Vm+Ox60?&Uqtou7eW%e z;eMXW=~n}%Ics9n)+@9uPa=q1cc}7{`l<*5F-PQ_M85)J>v-&+?Y`!Xff1W2jXz3P z^6h(jU(pewrKX&3;L#)bJKvQmm}SjGBu=GDW~61L3;a)2X>G#J?^uV1de5eY5g9a> zdHfFBTzD5Yrn=7~LdOZzcGYhzTi1XJL-Q|_&YTP!y76=|%PHzbr-1=i@l@`MXn9$9 zD?q6>A9_1~h*xbbm5CWiqH4m9d~@6is!=heE3d*+W4bg+rw+-M}TXsutAeU(_>=TW{2X;e>NPpFaJ6(bKMC4x?3RJGhn@ag?e9Cu+ znq!X)V#BqUJgrK*IvnmU+_rN`Sqb^Gf_LCOsrg?iNTCbe?c2B^3EJDZc!D&Xv#1FL zMM8bpayp7H9Od3in?OgNDN**jPxV`o=bNgE*{21~)lRW4IyiLyu*ZT|_TTMVNcP19 zYg10A?)i}^k2P+e`oK&hHTYP~)|2|pB-yf5_~~!y@lmamkUHxN&hm3RAylMiLFE)( z8KuQnEs`w_FISSRn)Zp79&E6syVT-e2pF}dgWa@fTgf{99rm-56YR@7^c`sWbJr{E zVe?+eVn_YX^VtrW!^9J2mU#m$vivUTRtMQ&cH3(dWh=;M1HHDCX-?TR;E(6hQSwn9 zF30Tt(TR*f-dXL{KX=_ujj|6Io05OtmnA2XjRWQBb8h$AFMh6X9Bc)zkKYWm%^SEf zvK;Y~s=eF3y~Lh%!+SSg6|{)CxeLb|Z)EB#a$pHf$gQdHq7_kI+AgixH91)XHQc6u^Z-Mxv+kI6^yu;{Tunb&KKCa@s%!<~YzuS);q9nV(JqI#W2vB8Y}B&HDYC=Q-)pQ5I;-}6Z(X|IUI!=P>+dPYMaIY4 z@!=>ZHSSdjR-2{&DvzIx-+$pSo@e!idQwD>{L4oB_+tCpRMr*KaJ?uB6^QK3JLI8f z<5#M1cK;+@iLPEr`aepJtH+vDaF%oB;>jM2a1R??=@0xVvUF0Ie6w)fojSmwt35J=en+0sr<&vvuQ z^vtfz2Ar;4Cjx&-p6^LqPd=T+)UK@yYKED zkD4zgrPg-FcQyGYNbp6BR+04G8eJVq=m8USzg&M^#QkBHy%HL&fR|B}=p+`)wsvVGUdK0!MYW8<2QTO_(#3Y2yI?6Uc~ zr<$6;TK!UBeaI)?dy=8y``5mO6LG<7u0d=-^QjS}sTLOV*+3WfK0C91xsyJ1_!N2> z%{GPu^vum?%Hhjuu>!NSON^7H`14DY&pkb@WrHN|HJV$%BA-j<)(ZK@j3j^Y%E?_` zKWsdDSp&;%Zn<6VYZB2BH`Z6?IaK|2q?=xrJ=D4Gfx-fQ_1W}0)q)--CW`FT!zWzD zQLkVoogb7swd*f5z_UC2l-)cq8(EQekFdF32LHX7C?stilpgbUegWi;g6NW5TdLH${zn-8|U(@ zezoH!GF-=+lwjxo*iG$y&K&49a z&JSI8BI@lf?Uif1+lxfgc!iu@zp(G5>7VXpmGvub?KahE@6Yj|K{r zA!iP|ZBsXWR!}{1j{VGP(C#^}y28t-Ni6SHi1ZDlmCw$#vfpfx+Vj%)8uEL3NKzIB zks^%g3CMExr*0>LNRRmj90C^0yp-%qA6Arp5PLTB_(U;XE7llN{QE%dfrMmF5-K-hcfPT-}~*trXTtaQ%} zl7p!_9~8eiB6l9JmK_|aXzQ4go@7BO3gKH?9YP<~NVR`!bpcD#)3duYt4;tlP1o|%QDxzlXKL)!H5Sc?x&xXGFh$!RbA2(O3#a7HXyc1RDZM)i zzJcJ405&|>TmHpCXc%{LQJiD#R3ePmC%jCJ@ z@*)a1)N1n*(bvN@KCCc)^&rDW&FAGi)*Sk!VAdSaWVgSX-hutzg3j4L*Pb&8fBu&E 
zc`{~s_ru_?l4LdA;lmi|)Fy==DIdQ7_(^tuYOZ?u7IDrzNG|D+!JjY5N5`$072VY8cIsChs~=Y{o*|-tCQYEwxBY6tg2eOOqXs{PBbFT z&<%{p>`0-lOm@8ytT~1Dx@lb3Z{$9;Yug+#tQ72Ez%{+!Oc~QLL|{(M3=_mTs3PDJ zAveg32rkjPN?p&b^T%W#YHhfUUCp%1lWcyR$=LjL76j9%wh$;yl!KXUu+)Qisqz-^ ziclk605*y~6_49RF~tIC%rYZ}&+0bRo^czeN)s=3XnEa`Dg)c{7Mna*NL}~rs9B* z&(B5`B=Qo4J;fBCt*lUMY`kiVX%dvh8%n+w8)%gDOX<`QY1Uz}MVt%puJQIAxPS0n zX7u#@1xO#kPhW&Mn$Fx|dSw_?3LD? zJ}O7nTXchVPcLM$vi0vyE4w5+PBLPOPubVFgE25rEItfj#e+=;40Ax0QdBhCbUf(P zKJ%M&f`zk2rPR;A-^RA}>d7?KqO;d>evymL3cs%MbM}>xw9+e5NHV#~OX}~nsOaVs zmcSEc@xg3TE8cPLXv{H=Z^?XUuN^jDcV<(>YE)c?Hz3M)-!!9@ue-1Ja$sEc1vc-( z_%~3kHjw^W(A_CC=zlcn{8gzO7pX9@Z{&exmgs| z0#w1RKqAGHqb|v`<;_jMQ`?qch2y!nwm3;8gT|i0UXIFc2{Wm=<;(&bx*jCYV>=UN z4D^rjOQ1m(6FyPz7?8Ae7LX(x9BDx>`S!RUzF00=jt&m{x;$75_6`-P=czd|x*n7{ zS~@Bx7!lBydBR6)9G z$;%-q$X7Pfc%wryXN)BxBATOUw2F{|*Sl>6EsQxQ{t?E%cVNRa*>qcrw9qQbla%pp zuP-x!V2-FgF)la*pT~$c)DBe?B}j>G50$%n)y_-S+_Mz}nldOd;nKzX*@5AHgyDRy zL-Pa0U~}hCiX_akdL_nWa5xK{fSa?gJxG|5yCu0*T@5Hvox#o8MyJeI>n6eERevHD zU&CIC)_Ai}_>_1RC`94qS(o2msX&TQ)$HtWXZW!VPdr8w4|~I+O)8?CmXJAO8~s5$ zM&)tEMiu(knK`?0HedMYtM4DwIhekH4|t{Djpdz>^^l3<3g0$T{30=-s+4?R&9p9? 
z#dSx%dm&Y^DX6SNR66yOoEYu8`f%ygd{lvr;sfP}d$W=YK3~*+Y<@LlTcQ|E`t6KT zAY=exI;n|!<$8{UMr#XwD&NR)89m{@a;)(TuA|hna1#mxnPFB5Z*T}}x%xy&=Utz6 zBBV5Z9L-Tlsda#XN)%fR>5a=`uM*eX0)P?!mQC{w@j2fOy#u{Ttf}w$F_6x=`e5sX z|9S@Ld-tm!xy{d4KNA(tEbdc_8x79*9p)R&2&kBmFBctGJaref>zDfSCyKfG-GRs) zvDZAxMT0JM`-9KID^Y?Yp4qciU|+@;}1z|HdS+J8p=;5{?B zF6edPT(vbHGEQ05xPlS&+JUw0CP1;JC7!n4n=)?G8&U+CJ_(xxD;+c}UZ+0cLWQsY z#s1^^>TSXtMTB_lcGU+%Z!I31O$Eaf)gF**0yyse(Qj8Sp&Cwk7WJyEP0}fuh$N## z)G}{G9h7i}WFkWykqnXeurTK1gKD>2YQczoD8z8wTIT^Ch4PcwPou%)nU5CyRg%7} zr$x|8Dl#IY92%j&awIRNbHvj)d>j>Mdvaerop}zyBh)h(1`%K$YYR}FMuXmET(!nzxr}ts!4y&N4^V5zyX28#(X)q5q@x5_KYi7mZhW zcYEyK3iOVU-)(2}e`{JZMKtqAyL$}Sb|`r&@7T`!s7D(oc#d|D6p%V5y1}LBZ^-i8 ziCMeRsz8UmM3z`d$ZBmMU2Hc6br_45uRyevzfVQRcH|vZmUhUx+1U2d%fE^8tQIQZPPO&c#3!mp1M(Y{+8fiH=xEDN#46t$98YNF)8X=>VW2GFL&z*eNu_8 zl=xA9Sk!SskW!Icld88pgi1&?%|4_W17~>vFRj?=XjIf0>ice|jgW9gSDv>?r|%fs z0k`{FU+rOhQosyfOB01021FiQw2{3VqeZp%YsJimaM&xoCl~iaocvk8vSAgPV1CZBjVa1PrqXl)y81JZ>Y~*xyvbHNgIPf7V=Q3 zT)UwOUCp|#d3Xo6t|`x`Co|zP#d&Dqmjgv1KP|yC5MxT=*ipN_KtT@kBI3yXzC-l$ zD*J@%#L&A2j)~$pLp3WXnqtE@NkpY>9(8rxFQ_FcMr!Q5YI}W8FT%Br+GytM`##Y{ zuwa5C^`EqgjqnLTc$+xYaUbbvLL#sB^K?2%enQ2awE4?HyuKcl;4LWk z+{nD=@8G>d$GNb=7vBc-s>Q7qEMGrUy5BtelBTrjV|3=4*D5{=@Xg=X1du_) z%+wR>g@q!cDYkj*v-{gb{;=kfZB6(}e*u3^$S({Addep3u$;D8Q{vEtj3Bu4M2cX0vm>7i#O@Gj?l5w*SbMmg0 z`ggM#YJ)2IYPu;S1dg3wPanOyOqqF;3B+J*Irqr@y-sq8Gv@pIG_!}DzKfif_&xg= z+`ki5UfINw4=@#l#fS?194KY zpsE`<%^(tf)oQQev}t;DYM{HHk4AA`$F_Dy{N}R%W&YO0p<2Fc=XvQn?Eq9UMDb4j zL#cgds|>UN{;{0Vmo-SDx~qWMe?H=Y0%<*^fyRtqxan>Nd$zakSiAKkENJa8IR?L$ z{fpQ#rS&2ZqszvzIpaIa`9TV5&HwZ7eP7T&3B>&afsjP zdZopYa<)T1%`IH2y4B{PnXhHd)E@CD6*1rMq3gV!Ej+&F5#-+FpIEsdgp-T(mlps} z_AXSQkXsE=Zc(asqs9y0uEOeP>@T&Xs8E&OE;T9jtV>dUfkhz2!{X7ap%4 zMT{E1`)^V{y&Z|9i+gFCLDW<3@yv^vc1DyyI)i+uB{Uj_46c}@5h7_fXdZJK?xH%HBUAkbDP*eGJ1|leM zMTQGmqZeSG)sVGz$KHdgM|%Bi;=Qr$ zFLVf}px)=j56-%6P#zno0ThWX`jH)RhE@o zVz>)s?+nS<1gBqk5(mYg&Mz1|;V`S@cmtCnq0XnvuV4ma_pi8#*L$=P^#mgv6&>YN 
zT}fJ_<8U|(%s-fZZNiX#z(;B+D`mjbuh%0%#hhojPyC+aXc-q(P1eP|Usz_$_hhC| z7lRIoJQH5K#P#ZTLqW5EJ9|69E7ejrIM~mV6nxsIpt%3`ehcUTg>%?2NlgVs)aSAi zMyU|$^NeHty3I4I);0`$EHs9Ln=)N$bIeo1%#g6f^h6{DzPOP0@U)KUUi2(BniFik zmBkXes~oY5pk3E4e;|5)F^}4D*sopYqezCjILm(V|-=`X=fg;ow)(CmNon2 z6rgwF3les&>hvZDSqLv&YZCNMA)uY|{6y6jc_!hjOg<}eDWK3M`)aw}PWYj--3ZpL zzx099Dq*Kx&UA=2Y|)RcJ5=K#^0$OOTkywG9OY!G+bYO&)79>~cK4JdK>rN%g|PuH zX{ysn`n%zza0uImsv6|W#HyQzM0_>1?K1IrIpTJOuMw52^SwRT{GBV$zKVUDVjNSe z$8v?GD_8>Do+f4E?&srPvv30@4I0Rn%CiMOAE9TL`e}?(x8$lfXAOPRXf{NQ_aTN< zb7f@BF-klVQ)?==1-j>Wu4A~w4Maic?iw|f;CpS`0ve>Wry zT?~>PpbcA>d+pvUO1Ma>{BHXS-D_Ljnj#%Y+3Uw+^yzOO?fz2%HBRA~uGfXc-+7ja z3Sk6?S9Z-K==Zi+k>tB@{A*ebyZxHl?y+%88sG!bJ?p&%L&atJ8#gBo&CObeKl-uE zLdC;BHLVk|Vpr+DW0Z0Dnq6sV`LRy2c#{!xyW@i)2p(g0%z@N%#veYL^swu9Otvtj zFtwpy@D>fsIKG!lEC)p$cfgbkZ@BWwv&Zh2e3NlDfp|g+5cfK|V7~-}J)L)I@S=%| z322TebkZ2+NUo;x2+PwkS1g@bs1gXVx#_ZT@D1G~cE5XxFBosh0G~_~@e1v6HKjYDk-)4y(D*Y0@cM2~CDpvvg9i!ae7Ak#Dtm)jwRH3_!V58UGcJ0hUI-ZsJ-&l>^) z+%AI2yz$8}5e1gJd(ulGjgskF1jca?}BlvmTBW7eeLM(2>31;mm)%g>?$@7D&6ojC$vM` z^d8qY%XzqAq{xO zb51P7k1?620{;%^bhHmXzLZ@J9hm9(amR-GzQi*v#9qzj%JNdp(%yp!d%jKDFh|v& ziPH47br-Q5ZXY!EC_Rt+^*>g^Xpe?cZ4?7Iha%UGT2&ZTu31!w;beW)+h>ghKR$}f zWc&d3DmCq1sWgLgvg_d++$9)OQjwbd!fqa0a$UG47sPM5x2m&p|DZW~etfUg}j+ za@d2W6p#^BC#74O@X(Tjukz~o>uvi1YH)@98S=pHSgXNRfb`=&<`ur!N5cd^xm)t- zi}k0S5TlQ|5vlz_!m1)cYOy&#vwS9+2XD=rZSX(AaeeOI8;czN8@(u-|0yhd8M~ck z`GXpcEsS)=m*O;>eZ};!v-sXShCcaOD-#}R2_{d|#E_WTM@YxAQnaWvU7-0|O@zCVyXrk6S4ho(Fr#HsObtKQXSO;rw$^jo2-=?aKJAtN zOU`xu`Xzs~TeDR~d*H~v0pp?k8T*Z7Kp9wo+Y!uUcE~GSTGkf`=8ew`e{PH@E5^~> z@ikuyh<^d_In#1MECp#L~M+W0E};`$$UAWLFX60J*`%81);N~MG4^NQ2NjaJ`>Q)3?%vT67@h|3iE_e(1-c7IUKvFKn0G=g&v z6^)L$OUsxK$thGC{umwjoo7D8460SvR3pxy&;fbN4k$BK;ZthmzqBF|IYU8?*W1?Y z7=2V9v+o}gVIC!sz$6fosY77KzxE^3YAoYXtzm1$dG}$74%CI*-$sGv4??gLIaZGs z-F`7D7Nrm^lH{1mjELpcv$+wiXn3Vu3p4WfIZZxqTO%oE=1sdp|SukGCLFjULjj^5hG>|7=vm6TDlAbMI4TjVgM zr#R;|09yt4RKQ>km>#ZxL7MuzKkrZQ*{|xcW2~jm%}88!B|Qb4$uk0ag+G+0IeW})r6 
zUzF}9p*|{b%*K(aBjWDqDu6u4r+s59b(!(p0u;kgH)HOd66}*9u z^^D4)laAtX32iUu0#8W;imr&A87Tdp=@~J^)1?~s^q5O7=fyVm%XwJ1)no!|lrF-w zgV-A6D&P`^$NR!fpM9q1MD83~XX<5bLAuwu0o}3c`b^$E!9L^#&uVsRAy1dTE{u7a zs9H};`(AhP3){4KUT;)Le(Y|T*ICWF ziSU^n&5(6!(Cx~L;`Cgp01SQAHLg9?oT8+`r8wk+X?cDURs?NVJ5Gcr zxTx3ClNc1&ttDHG!RB88e5XdSrF$|Vsr~!jb5JgXIBc+WiS5>Jjigz<%!UbrKbI&f zquiUbM_omYS)}CWMfX)xjnXebU%m;j__Js=Qf=P(l{&N+@mN>QQdQScZ}Y49LtVf1 z_6Nag*LhRs_bhNcGOL-z+SO>;(Wlv73&;Y*o6R=fV9MUHTAkmQZtv=X zR8s1AA+4Fsq0vF^n;Im@MsU9{HYS9}6IrmY;_QHZFNML+AxKJB#l#+&&S0fn0*7LU zh<4nP%$%!yymjELUi&o9mf%|&PrS_qa}BNV5h>mJ?Cv;NFrQysZ7~5MPp2Pi@!NXV zUF1A7)M=L-7eu9h_rXzE4qx1-Jx#4Baimzb z?(uY@qPY8ayO5pM;lrxaK13hxm{$<6o3h{cF}%I2Z`ZOFA>-^UCE;RsK$M9n^&y6s zVXm4bs6Pp}CZv~V@Im%tW*SX;ev`RBum8zLnZe;C49IKKjzx*R za$QEuI*pHvcr??-i`IKtme5dxEZ55yNr#?c>iv6vQ&>rhu3=V7US0;ler=xp zd-puc@epT1Ht6WgL{wW`_PfdzITasvvpVdY8Szn$xO-Z)T!-Ak@6mjd4#P{M9#)3V zavfa@r4vf4aq*foEf`@ACD_kB4w<9s6|m*!O3DZxhGEDk+N)4A!)4X8#5e}T5{x(Zk)qTGf@3j4lV5s#j8Bo5ng>V_i5zG}R2G~~j#xr0 zGQyAPIiDRbQ<1Ok*WxNGS=KaOI9Q-sCxqE-?39>USNF#*=@c#B`%2?>Xl%1h< zlnBs09zhs=KN0?L(*3d7@%*d*NtA*aFg%%Huf4`v`*C&p&UFoc-!g>=E9>7BW*5}3uym%M;dUHez22+O4Xt-<2T%WbXk z>jW<0qw%ZdD{(Z)-eU=G7yC+x+mr!o#t{?aZJkgTD;>6P{;0UQ;%@vEy>kVbjTt|M zH5y)icVZb08XfR8IcM7VpS%0QA?oE7Gp2s-NM~|!E(vv9FRf*n^aal zp?i6##y!)Gw5YBdCCztd*i_e$ZcvTM5$xN!)@lE|r4C@k=%2wF+={!E7daftBoiQH zz3lF<_~Ba4mC$CE<^9Xfd}f#-o95@Ahbvp5_jMVk_t&2)?rn!8ue0Z)GmiPZjx3o2 z!>6AlXSL7f^kTn92gNYysZze#=2~-$lU5px1nQEmDj>ast6rRv+<}|mI*7QJw@$kbl{~|<@oci1 z!n1fSKK7%wm0OkYR7p-t`?swWKcRq9Yrq1j2DW_}@s{mh#Bt2l;l~?ysDBPlF3m(v zTD?$twRUXU(qp)qlI-5u)%dX9Blm0RNV6>LOQNFTC(AJbc@_6)advz(qt$qWH z7vIxI@Od^N0#7fS)}tQR059aW*2^QW3jJqWT=buW^$BUHlN{cYd#8@+TxENZGT;ne zX!8E<^}u+GpK+;f;^5A4cG&{~5wAjmNQ7}UDGXSOCS<44Mu^%?MgM3!fBtopjrDe* zIe)&7>fhQ+PpgFD{re{Ih^X`Ez;>UYFoQXW6JW2Mn2+X%T*9@hJ@x5l>10wK4#N}C zmhO_^z1Q98Hhza6v_GU57eR(?Cwg@ooOU^4HhJc%Y<%#pR*R1ehuU|kb!EN@zQtZv zmLR58CJq*i$atmQceVN5$`_annAr8|zWpd=t?4GOxu-HH9MmR<`hcZJ^wcYDZo+)( 
zzm~{(e=ITF=}9+2>i{}FaPfeL3hy(bNfnX&8h>X~9$dqBk#@X2kgBj?fm@QafOS%m zCRpJ1sLsO!jcuYW?Su4p~Gx@GPG;YtFZ`eVynpzm=FyvGr*smm}K%R(y2FFM>g_IsyKIBKqoW-sH% zbahBII{`t%G#06cm2Bbpu({ZCUGf3JFYdsv&voOyre@9C7?vIj~X_mwI? z8RsxLSIFE?z08X%$i~QbYv+=Xp{><;X+~GklVb7=Wn-U8%V?@E@%mIt=atJIU2gH8 zCw2a=+z`~Q=aguCUAOOUcldMp$7ujpuCgaciwFMq-s~?Qmwn8aQa29n{ z16XD>>CvFqGmsqvSaxB0sK5;ORE_E9y=(?MeB7U&iZBz3Y*2W#y9-aXkxBjdR;OFZ zA1gbZzaY;R10eiD{kLUB7uE<^4onn&B7wA|ntV6;?YyQF$~!jBA^Qn1|%UuDFJ zGYjZAApD9HWSyJ63Bmrv1psEnS=9K%$g@=~whBtCc>qJ@T9rI9(k;0sZXq*RbtR&o zsrg}Z3qqCA+z_~d$yJ~DFC;mAf`wj7Ua7h;`3uNOvrr-q^h3#xinG$|!GFhw_L1yS zf}gzLzrQ^+VCZMfgA;++&+tE|w##4XvyZshFPLIzB z$a-<5l@ib*j4<(_6Fw@bq4;Nb!_Q1MFSu-PLx5z~spRl4 z4l|h1-sR2NCm;KX-rM(-XTpT|KmKh}C@;ESLEn<}n|)_&>6$k5Tzz7lUFYv8IZSY2 z&i-F!5xvnpMt%0u{mm2w`5ctUbt1U!=D79{8sQo^+VplxaY16NQ2xilYkFOJEMC!y zX)V^puk^9xEo6qYbCOR`h{W zu$%?CWNGJBk@}hor0d@U!~-_uiddHTM+*K7A%cAR2S-ZH+g*0`uJxF+-5k{~Iy2xK z=IRAAEAPj8tb7GM6`fM@oJk3v3uY>!s)V&4Ar2xhgDI=dV{fUbZoWy@iVg*`%Wr|k zD6H`^)7UZcSRg9s*Xg-P)HS4caZcD#GBM!}Dd2C%khrbu_0w-USU0 z)fhnn-gT_mtwPm#V8?x=z`m;sxT6ub7OGe5mUGRY>p`I|U7Nod_I`SoPVcScs>k6R z#daUme%5ZViVFFdt=Cd(z3ROim3>74t@pg$Q_aVv$8-EFb&I0W%{xk|y3#z!Td&vj zNfM^Izi%(Z%sZDK=35&y%yG?dkM$ZZ&NW|NQu>-4fveSK`-77%RR-M?iNqfN5t7at zTY6)kwEE6uf+d#^uh0hO0j}DQlg~Xib_GZJKV}fM4PFf`ZKl&Z9T+EN8;0J$#;KFyz&I>5_{}eWB(cJK+1iYYBPU3mz z!1W+2hFiL41P3rlp1aUBgWw#iroNkhX6=%K2k|Q64-7x4vOUvN^)f5Qt|(0Dc5!5w zzpicBb1Q**RVHR1%G2wzC-_Wmz4@hwpDn+!Uvu96h_51jUtV{O@cfxbz(80qeV~I% zW6D8#Ca$&6-+Zld*(ULLV+{wkG1V> z&#*JVQbKUwWcK&^?7t|ho7RU#_wMLKudhe*XL5EowYAyv`|hyeD1?7IidG@5Z!r&M}f<)wh0qcfD;j_ch&S?G0|b^uIP_elDuRswY(xL$wy? 
z_K%J3A3FxD>>EBtUY2+^TpUwyDF~Z|3g`FhA^nff`k!CpfBcK;CH2J7()r)N_9v3Q z9vtL|4$ag*95bkzP3g!kf+UH;F_TeMK9(+Go|rtt{z>8^Log01M#=Qb^mMao30mEl zwNtH2tL|BgAIyh6Fa2rDk|C8yM!4KunY zS3ki~xnlGG-mp{m?;2-jY5@^bfa8(DdEv^@6}MXhLZ62rr~b~xC#N#3PW1Bj1RWjx zvR-q0{@m7tR3W0j!aeIR>>uK-GOuUhXGfF-{}@6mD`~-f)TQG2&jgi|dYKRi$vf|Q zvx~VK{+j)vvL$Kn*Hv04+H2;ZiitevZSE;qDG{|)ENt=oA}QLL4{LH&fQ**P(M6wK%2-@HE>$7nD2H61sD%IhTE zeN}0(2_}t&S=B_soTEK?VlUDh>U2ISr@k-5hMQDj95>zH5k-!>Gj){c`Kd02Z_f3^ zOMD8r?KKA`;{_9UTh(^vX4vcoQ{;N+%xMa z>{2Ql8$9jKmchniXfsn0kt@q=0TR!=Gd%LjeK#L@Vr(XkT~yghX<{2KyLy}QJo_C6 zxjR$URFRWhUKTGz9s{B0yL$NI;8Doy6v}K&RtCup^j;%vHs)<3j5z#g8e-Aua%^2p zX~E**<9T>(wM`e)P$7b;EcOhbs;t33MSWPqIQ9uJG6da@7$t%;?sb2bxp7%#2<+Wh zO2Lkv23Q9j-qX9I)6ui5Wl~C&MfH%%b|Zh&*N%YX3f;?&RR<6AK*)YL^D}!P%5Sho zHSycJQ1$*lo%S6Hw)3TTYL6tZtARt@p3}H-iWsGiV6631rQ{?rzDkeB{s{DDF?BGN z^dnNw-z_6E8zH%hywONhsNMtj|bF~?Q!jjJ!n9_w9&?RuaUTE51zt2v=0m&vA>t%pD z{4$E_5?DP6q{)Sd1lg5Lizg13^R-PX)Ck$=E}m~~U|W|=@T2~*b!okmS^cok&jt>a zhlqc;l@piO8h~9;o<5K49CvYB71l$koY#Nbmf3Mv851<5WP|~73>Wyi8?XnLc8f<0 z-?3Oo$G6W;+Fupfd-)p)e|98A^8~c)-qh4@YTB%+CVXP%;liJsRrTEoQ9{YgS(_S( zI@@#=o5Wdmu9&y;R_QrD`U%j;Mc~utJB$Tp;22gO9xmducE%XKYIVJ&RzO^wgA^{a zotI!`m>SX!QBBBRGcs$J+}AvB&2#W)y4EN}!RkJ>MlIizxT(8}v3&FSHF$WmH&9++ zx7M&GGJ!rDgE{;HjZ&{48pMt0y;E4#ue$WZnZQwb)r8Paio?Z2-uoLsSNkLMqzo12 zNK8d6@ji=Fm+aVsg@%sb=hZZn^mQEI&KoN6YCwfK(*pJt?Grp3oH0$dG~o69`@d)5 zU-7*E2qenMF+s@#WxQq-lD=;3+xxS9k6P%t9pb{fhohVAW%$!E#&;+Q9A%HH+JnPt zW!)vKY1U{@mutJhjYSo)K@EQlS@y^GkMrD(NA+{nJ&v(Fl#Glv{VT>u>a{F6EMx8F z(dDL7*lD0q!}c@z!tkIr%dIA}{Xz4)8%CB~F#}pE=W)>iUK+VsB|U!NLyyB*6u1sL zVdYNIr#Okm15{qg6WyG8K_%A*@8?$?8GOFYEMrN<^m-;~WJ zPd7DRo7`}Kk)n0X8u7?vyLAE(pcwYHZvwZMaFycMl2v1#&M#Vao0K4L++&d{!~OfZ zPg%Ysp@@4?-p`^cj-0f(?F9NyufVNW>m5FGT%ym+6k)?j4n-qYuGfJE|ATDf8IGddPP`@qx*2-@m{&}F23Yg!LDk@d0)}pzzlvnos!Zk8ApyQh)->?vGX=- zppTnZch~}m;H|JfV3X6O5khB|JMSF>PY+SQ#kTR6*2~@Z3;QS_V(N~6(4JYstAtct zJL&L?pJ$#fym{|rhG`!(^p;PMbWS6An&CL)yGzZJqP6pENY-+d#5Z-dqkwP;_!xk~ 
z`C7dLU?24J`R><_sgQK;`#Ekms`;nnv&I#i{g0txqSVZRfONkv+qawZJ$&2L5kAt@ z3Z@3fv#pr}mzI%nb{}o8)H`^PFHW>{a@bpoApnx8<=E`pOQ6Sx!ZB?WH|^6y$z2vOqLYV`&^5>=KMU*S|8ZVixkOs zDANauE)qqa{J4lNk!(*LRlRV&pxjt@UeJQ}sfcep2rBsGq?q7o6O!$`+gW$m1)YaF z>O0B5Mbk!nC?xlmRnaZvE&g$eVUX#FVW7l|0YRu%n*J_42bHJE@E2H4J^^_TpWa#5 zKttt-mMH&^hP+4w|X0ioJ^F%!wH#N&&*sgG{@ruRpWjtP_#Db_3ror+^fpM?A;lH)tb~liH_2gw8s#Dt@e=T+k<&QTgez&^o zd;A@q;~&?*hl5sXof!}p)8?Q@FkgRa#>~~f#y|bpD|Tb-W&Hp zj;SWuSMK3VXV4!Tm)?>aUtv#Nu=fZOlscZ>(zY_HxBSai+1Apq3Og<*pW=oCS(&!7 z0eF^zz_87cB@1)fr^q#C=elL}7Sl!dICa;YujM2+YXBHNwdD??+Xs6Ub1Z<}_ED#6 z>;ce={59e9Y%SoXAV7+#E?_t|j(i^0b}E%mp9W|pTq>LQESAra_fKNUk*SrBV z?6gK=)s~VF-tVm$AK3x#jNA$q5V5oFDQDh%92YML%vBQcr1ThUqFgmJ=LjB_kbT}~ z(7yDT-Aw|AQ>YQ#5Q&lPat0T5E5+4I*;+qOsW!sb4#-h`*kleAIdQC?T6Q4mjENhu z_3w3uHFY)*Y6K?z6>m1^a2FY8>y#;_pg}Adjhuw(iffOW+>|PoUq*1bjcwjxc4(Oh zC!W?e<C@UqH>sjL zYU^zHMwA@onRiNMx-qA->b~aN_Ba{#D4EL~(Ad+10={dkaI#EsO5#TcpsE`N4n4n| zf3x=ALp=-Vbe*$-;Xd7Asvz*i6Xcj`zWwHem*ukeySsP!EGl}rt+@FyuZc<)R-xx| z1@B)VETns71}q_VEcL@IVl^mLo(I$t(K9`>4#KHH)`Z=ka)s`twNlS|iV#vZ4C8AT zp?k*!9;?%OEAh0dOajOJf9y$8P9l(gkZ}(^rR);a$4ZsN$%_r!`#Nw+=Uw1B(L9|M zIPb!!3%2gv;qZK1Ft>H@W#^bbK*zdaN(19Pzw+Q$XGXm%i5aa~p?^li7Df`tHWbg) z|G&q-({W;>%!4bv{|X3AOUJ$m+Btu-v2rE(`DN+Rd9XLfZWMn79hWSdro#JVknJH9 zR!nWM)&r_AsmA9)(>*I3=3+&|5gogo-d^q2Zu`C7)y&Q7gu`Q~{DOc&^_Qm}^|9{F zJr(X3{8n#6Gqa#-9f7183ne_W1P}u@6B~``(F)SpU4J6cmR2$M9T4M`{++pVoiZO* z4os&GZhXsb+_~&n23-SOp`rYt$_P5#%1Hf^{BK*dz+WAz+RdU8B5Sply_l|4`^b(} zyW{H=ZoLJ$g-WrWgZC-UnVJr0xk6c{A-ICYn=*Dn%;H`!r|ns1B=N5O_nKUeo& z;<^L$XICCnn~-5fNyqWQ5if7AF&EfR-ZKa#oxah;ebnupn>)P}2H-9@fa?BPMhZZv z62>y)*4L{}b1qv?6FB*=gSe7X7Yj09K$5*F{Jr+?5Wtd=$LT*45dI6I>l}2Mc6dc7 z15MPOyj{Ilq2;W4-mz09X)=GV`2ET+QK#WeXbo1WN7I*0#_zy^SMp2kIFnq40X3Ds zk5ggx1J2*eJzTe~4P-cz(UoeOiUfE`+if$G0E1iFJb_`t%|hRbN@AhP+5gLXk8hC= zfoMNkfMohrro18f$VfiF%5?ADX_DZ7?7pgI=b`MBgGr=?^5<-~|J}|nqCAQ7s*`~t+OwmIonuw&EVi2Q<@A}AzxxFm5=8px+2!3dhm*ak~S(y`3m6+anioxeBISkC$}S^5}|X z;-IB7qS``nNU5lbsr53Uezf1ww^2!-+!EUt_THlCY|MkFPkXH{q&R#pZMj}pNxS5K 
z>D4SaGWL|FxUGxzlB4}Q&kU+Pa8lC;?p>7#m>q8Vf(1iYPtwXtd`*B8ul*D=`M=rB zYZrxdv9#Q%sa*7EXo5%WJig!D>4xzuVqL&?VdrBw z;cN#9BZjdGA8f6(`R@4erR@4NrCW-XjEw2s2q-B3D& zOSacMO|$CyJ+Rsb>Dd+z)Up1NpE9ZuD`B-{k4TMd`ToLFK}0}B>mQLD_k*|-&rt-0 zA3krBn)IHS4=kbpRqY2Vm|ldZygBxPa;&ZpjhPSgsQsuI>D{3j5qFPv<5E1DJmp62 zBex#=@h(5rnz;LeCn=Xflhw}dlF^|TSISfMhI!Y<$LNr#5~D42bAdvxUKiK{LP|W7 zZkA-P!|_+ZfBymebHd2iXlEBLV-3J(ievn~>iHR^B_Y!{!mGG6h&lT{Z4V05?P2g> zBq?UhN>a}cHY5gbjlmr9tKG6*hE9cNfFZE&yz}Zf$%PXziGcT>H|emr;y! zkT#jTP3%*lVl4MV82=(UU7G*7Z=2W0>fOpgiWT9rSEoeAsh#tl3g__!5N$% z48Nf!#?L(SNGB!ujpZ)hp_+)d`&w*No_c9>lIRQm6qFj`{n_W2JB_tPBWu}2{oJth zVrF*Pm3PMUhm(h&7yTw0fKE1p=Fg1HZyt;yg(ZS~Y5hBz!qjl8-5hc;tpUKf*2TLnFWyBjI}wpz693IDG#*czU*FNal#`B>gFu}`$YsveW9^)<)3d!%AiNaQBK_8m5IEIQAb&|5 zpb^DI#QJ2z_a|G2H$b;XqwqfuJ05FmYCQkhwVziptN{}!9JX0sI_8=rONVWD?&ugC zvI@Fz8~u7_kzl8n#>19i7%1r2rq!{;oYa;sFq`!SwWS4J zTw&?1i~ENmG~Ox?-Zw+l^XyeyD;2@#uonUarSf&jW>3?}5gr=&M|9A-RsPiVb_0e0 zfq=d+Mt}Tw>WZ_kahMzH&m=Vo0T6$QVNTKS#$g5bNT6;-3jf@VKkv5f8q&#xy>%0hNvJ%=NiwdD4i#!rYmU61Hf zyF5e$^lX3z4Qjev-+Jo9?CN!2Ht$=ea;hT;&QrhTP)}nA{NOQ{cYUg63DqP*EOmxO zDVk=HY4CoJa$$g&l*|n&=};hd?>8DWI@qUU(WY*~y$u5>f4%rChpf{jW^P&&Ak7NB zTfwg7sa1Tv>^((T9&v;N)v&4ae;{s8vbuc+@-~c4J@VBQTt&hVbBk3e5j+~+Wlt@$|^PN$KEGd*t5Tuoqo z^RXh<(HD41slav@QtxA~Gh{7t@kjymM4gpw-G7TB=)Ob)GJ7HdC6M|xQSD3gk3Y3* z13mHFVt9b587?J0L_nuo-yqSG72^;qKqCB3gfk%!(sPAp2gS^>$B~;4f4*VpoZVP; z>~L^LlrX+eayH3v*)!$tFMzpU(z4%JmR@)`REm)ibkIFwGcdve-Mm}*t=_rq?MoK{ z46p*av!WJ5d^aBB-i^A10QW)zDs`Mz89o$%kaX@z8VqMtT^=A8fN}qVZ1xROU!AHt z&qOrn{%I)!II&64{IvNN{)cWMJw|LzW5EQ|x0orP3rk!Fll$yY@}GUw)A?ZXB1`ja z38B4bN@VJip3oWE^!wA+?|UmdOXzes zIfdTC$J_N*6L}+LOemefXG&4l38UA$>oU&(yiKU5lf9?3-@&74PWbyP0LIEEZ|k0b zZ+3m(`LMe2u6KFdRP)l^p&Gj8d}&eO;CY_$xky84*6`YeyKouSa4Dqk)t^H`b=&EN zo$$POD*ru!-g|S7bdzg!?OF*4HVH^Wlz?jU%RZjnon8m2(3AHkg^+vusFko54^d3R zhl6Izr)V)G?6ROF`p^;qpsar95&^&yftiToP!E{S5w~G+&&%M$6BMc1 zmk9~TPHMIZyV_&?MH*VHkPqyg^AIw}R3K!j9`PbdkF44v^@oPm`teF+&f4;%dIP6u z@a^szli~OTM~J4nLJ`eiRF?i7m;I+g2hrf+P64_XF9P&H0l!~PZXht%pDj=5Ot&6B 
zXJNn2zDbAo%uw1H?1R002KGmfPDH>!nlLbw$@pEb)2vSKRh(Epc4fLtQigJhq2eCr(XETse)Z(N8S;daOo9J zQq1)@vJ@RxYGOO?g^@Y6PqHBx2|=uD7gCm!u6YE8iqteTk5LzYCVJ#}=aaeH?t{;S zCF)23TWbHId_Q-~3-T!*7$w=Ej+StJ$up!cq~^(5`-ul&VE(Pw20hgm)VFT(LXu8N z@ZSdu9{@SL%%`?JlfT`&Q&Ng(8QGd^#1~?cgss3%z9<3kY7*j6i_tU{#J)&(@ToGjtRZ;(RI5t;d`FcbP zNME_^WA@xU%xKqKBnSuN8BCRabh3)pt+@oGdkenoDMnM#(^ig7$<$^Yg5jQ2(Y0K`%^j*5J^Rt{V6jfqnISE(54a%*m}Gpn^bj(>gPN|U;Kxk1#W0^-S|V;s*m~MhG*WY z4^%zeaQFHLAU7zn`uYG<8AK_P)nSKw!nXrN!6V53AVsyO$f}9h#$N=7B#9|+uT00@f1Q*}koD1aqvM8%c~B#=J=|<=xIH}~lHKn^xgY*9-;`-Y zqV>9W50_x?>rjf5rxc}7GDNcq&yp$RZg1gD$Zx$Y-m5#1DwKtOH%nCe)vBUq=8R> z*s{ayKUJH7vq8D~?^jX5nhqVjPfR_NAzVAbIdJ*U$s+${2FZWESWfDMXF37DYz=M2 zAmm=Oq6EE|&>Tj1!>gw`!sUKcrbe|7JaD|MtUke1KI{#?u;PaL&twO0%k+u^O~<@- zuCW~3dF;lv58XkdbKLv8|8WKXt9QsefTcuv3kvwYIfJR!;RsA%kO%$Qq1ffna(l`F zrH^Teql)dC%;shZaf4kQNvsL7KoSVUAV6Vy9(Uvl#{+Q5*g+cA0>IUZK(UsQ9RP5u zgbDmfT1g+_j|A#kfUn{@3Ik0Y0j$%$pp(5k1M7qvKJnEX7*S3&FV`qyN}{Iw`Q*v0 z-n_+y=$k{!oZqKb^^s56zSk>WepZV#e#sW}d;ak@A0xf*=knW_hI```4IqLPZy0(0 z?(eVug8;^LH}r>6z_%;>e+f|TJ0t{-&2c>3{vQM+{iWFt^obtG>;dWz6BZ_L-nt9l zB!+wx{~LlKkUWl7Zim_6Yz6D@GGPA9@$R=D@vNKYsUoVjory4lj~wZd2QVchoC{WI zx{TGqc~{P?>+Jh9wC1;s>^eq{SUW{sY*N+lDc#vswN-;GRo^$TTDYdF<780x-{cgQ zv`xsFIC}o*38FjSpB@QBzvo9dzcF!0QHxHci(*7Fn85>eA;4W8$I;9o$ z6Q%uS`Tv=QY7!D)7HTHY9+vWj{)-&Q_#da-p&p8ZEzg$otNvC3&CZL046b*LUN%wc zI%*c(5ZRC-8nENMGY5FliAKd5>X)RhMZpQTytOZbDM4Zza}oN3VaW8MexK_W$TRyE z36xB%*U+lVq;itwk(SXKj%4>hiN<*EMN^xI6 z20RRScdEE>uvw(n$$V7dz+3U!DEFdH`SUoaDxY-mesJEWQzlmTykT))^2b@in*-ER4Uqg&TCA#7p}kA+&lL zfWq4@lF6XluI3VMEsE?gRSL@aQeOJ;9{aPd)HOvDru zD6eans<6_GM6Gd(ehmz)Pcme>zM;+Uz?MLwEV-QgdauAhfDZpEAQ*Jj_3`Cc05uUX z%Nf_iU>NahCIbO(?r5@HvHjsimB}tu#0>5K>cwJ%$?}FUJtBlRL>d__uNHakhherb z?&b4Ei(1-~=LUi0sfwcxMw7v8nx~4I8c*?O77n-q*A`-(x_4vY|H~J?3OTW)BUK~K zlW?Xa?GD?$9_Iha7_U_J_>kZtHMS*P(4yY|4|RrfkJJbH@_ya&NTYCahkbkQAYPe^+f9)5Oq zpPx$}aY1I+{j*<#^UIpcp6vJ%TZ2NJasPb4WL#6SXp%_EuLzOENtRg2`QVeZ(t{g; z(2Z^z7nhWJe4na|Gy2unq^hC@v5G&ux14-!D=hO&!h>n%1oD8G$oRJiWkFuYzbenqvh&!Av<& 
zN3x{A0HTAi>Zb}$DDo=KMx}N}_^oa|o^j!^+qkAHF_Fr$^?o-ZuBVmVi+dg-!l?E{ z;+59T+nMZ>_1jdvX}UZ6a=8F49%+D+!5jPlc5uDg*a_oI&qw??!FmD$^!!mU=-c0u85PC?epISk3 zI7|w-&m9bVG|czb+T%+{W_v>!jkap(6)~@He2=BI)d~x!B$S`BBCyKqS?tWOJ)QLI zXWzS>lArwP=p5#wIf7xnu*{Cmalm`(N4f8-Ehs`;JlaGAQ!d+grlV3RiO6pVI$Kd& zmN2C~?6s15`ICkDjxAC;QCX-v5lb7r+=P3XfUh>IWsTt{o>*@A^KT0l9seXFr#2Bs z1^(8NjXpOVZxv(s2M=G`vf9ImVE=m}@W*X7l;vE4-twps|MQEV*Yyub4o<9O_h7I>~O?5r(6i0zABl>`x*F7LG4$>s+5#UcE(`5QMSJKK>@!Wt=cA6n_=xaMDIN_?d+qnA!(uXOf;r|&Mq*!%AI*# zQ83kTqyM`<%=2XJIGS)_S=Yq#Am4u-QQ0vT)aD$(GW^>o0na1{iN`U?GAxs!XE75Q z*zc^QkTDeBU=U;9Cn@T2>tKs#BqaZ>K?Bg zy9_4g=@Qigtv;}ruiYt&n5*1BueO?bQjzv_iubr1|E|nqNluJCrJQiufr&fF;*iPu z(Dcio8S~ZP%n0EZrv7E`hWG*#qZ-Na2##+=29`sx!Gg>aoJTC6mw*(ZuK;~x=s|9) zn~wnb;)6c(n4Nebm6HveCvw6$7dW+5m%qroWpiMSS-10*bi~3)+Z^bs8s(X$)4L3_ zM&Xde;&iUBNPhhVsVhegR=}{Sz8omS4W^E^sAmkj>6l$r|DD8kmBVDkqbaML9aNzY z@uZe89Hl#9d14#0$!3X1cBI9bkyU=Oiyk+l5RKh@g+W2QOy~% z%{lMZjK@UCO!QpY^E+xay^Lb4Vun#fqiN+!EUi`IJ(jRgHYe)2c&_?e%%i({6rAiL zj3i2D+dFvETfrp(2ItafO^TYWLEb3hXTOt{b=u+`ouzqv%tevN=;;WLntmSqUNJzK zz;G@r5yZ5O9l%$U{f9Iy(0kl9{|_^Sh|Ryz#Z3A&36RCmTyYvJq+@cV%>vOk@bjmV zoLF}Al!-n?6)F$G1NQo})0p<27y_OB5*FKOz9(wuSyO2LTIN;I!RPf{p>6aIwqa~wsd@p z1ez}(blCe+g+-7I{I{uk1qkwqU*9I+sC@pxA(-xP#hza3xcwE>MLB~zc(7d-lOCSZ zzn+mRKC=8Ihkg95(H#F!veS`N++J}7e*zcYlGn(xs+wZTO|d1d-&9kneB+@0&zHE! 
zT$N%IibbN*PyHGRlftQAXvg*my(`bYB&0H(-F*98?Dfc-Q0)OWy#?<0-$bi?wW&WK z@qY+lEjfInl+6XglTaGR{>wT_bc>k}avpY;-F~cKX{!$Vzx5Q@QHd zv_i<|Ra@olw`t|&AQeOKSz{dUcgLLV3VGz9_qcsO>9c5b`6GU9f7`<$)Y|V|J<$G* z_a$%;jfRYfk1Xy?>X_DEx)U#W)Xl)~+oF&`$llDn#gP7qd3$n`q5mU6*}kuUQ^jB)VHrw^{C82Yl&-We(sBBj%u?U z29=4P-JHoY9SVWxp;Kj&ndQ!)lv6U_9zp6ilSE-bgLb+~xDrGi9O>H3I3=2H8mT-^0{j0VOCENvSZ^#;S{e)vPK|7>$ z&M-9zx&#qN|G9>5XeA2QfUYCTm*_E05_qLiN-M-M+nf`DmgSmvXgl6G%eK@YboG5P zvW}{39uBbHxnCkp_ckW$T(Ih_Q)+K#3vilG2|cYqPl|BC))c_j!6j0heVh3y*~dM+ zj?LhA!umi2yuCPkT#B^JyDsr~u*r+vr-Qjqv;M-E1k=Y6LGkWuAob%mIX1f``};St zR>lU9IYK!)j8#wL8v@{t3oxs297m2>#gJFkv*O{q_$%!?=;t_wLoC%;Ms*$h90YB* zp9r0LS=uLUI28Ii?7);La}H=6Rl;;)>|laG7DIq-N+_gl$iNYb=;@D2y%S<@Y3e3B z%*@tBW*on-NH|qNW~_qMnrn}^)jd_V0#|4JeWOhm;_C@^$7l->Mq^ibOev9+U9tov zJ$!3;TzaQxQ;`0AW|T@kc94RVXYJ^s(U_z^F;Zr;cS^md-RT+mwU3nqJJ$X6uUNb{87^HdiMY-i`LLXBD&4{9S*@$i z(=_AWY30Z6X&1`OEIRG}i=Wq@TfaAvS0Rj>-t1hTO-5a~h!gPI<~r6$lBJdh>9|-w z0$-!y)a?8Wx-KvQ+ufBX4JR#32|!`|%ExM)g5O!p>mODe^}PW0Qo8iEPktsAKBF3K zcwPJKA8+tb+lA!V;q1pE#XbDwl^LXpHij%rLF*$Ia^Q8$hY4aX8}j-??7O^^RqIo zrLj0n_~{?-ShAQE5SxZ>ewn6S>u_&!rwrtmeNa7COrMa=|iY5Oi8XD_K|kD2{tN zf%{2!@eCntMD^v*U#ufNJI`Zb*~3-YGSZMaID>h8|3pqz0rCrIvp!D^>*9V`v4Lo1~5C7OM1D%&om^9Gv}ekEEXd0m%p9Mm;A5T;c{C~nTV zi?AS?qWPadd_$8;JNkby*}%62Qb>ee_$V_|U~K0i+!jYI0TQd;3f+xi&icKBKC zhO_t)Th*ar_~ATaA0eB=~@t&o47pbRvh`LJwij%WMdT1J-@!KTqjL!ZDa;(AhTZn z_Mm3@|Dh|y-54iuO)AYxlGOvkk;@eKayEQIU%$*rpQ4gd4Lc9--=G<&ZPjV9YDnM~Rhf-s+S3ecA|9!Uuz z26Q`o`1@-IfKG--e)xcHtEH&9#hzi$t?DR7;|VW1;mZ9%o&)1hy=Ax!|NdKQ%M1Xy z!_F<4)gm-Jmyh0_&0996&63Q+%VAnjd?*@zAu@Da{F8!E82#?ljY901m@A83x%+E; z^Ah8Frc43>k8lo@V$mkw`@{W6dlmJ@lVG!h)klZnJo>ayS!MGNJv3unneM2yA? 
z-vF>y^EMmHVkH5=K~FiF+tzo1`nwUt$cCxbzA3ETgusZmRX$JDFX|InHm1j3Z{_J@NsRh!g8og%Viy=O*nnfU(s)AaOm-T zwPMGE$RT48lYxcI16E%g>|xxX?^PBU+*BLwmcYSHYOq~|!dhp8G%?E?mPUZNa;+b0 z8v#f2k$LsJnLQanLwLQQu|FP@Ab)SSy&ZxAjXXkcE|Er6jciZeAqGfoy3_q~89=>L&KkT83B(+-F>Bhj{ zpl*gMik}u!9F%`wKDeAhrH`sOyplz8JN&~iD$~Kh672jr&5?}$y}o*K+dBHGSUY-- zrq9D!+#+jJQX2=$DiC~q0cs-z);br$iFw|zd>gV3lZglew~C<0jiM{UBbLU;fub#R z1tvX@vcDzv74)WX60=3qcGUZ~Cga9Y^qn0y-f{g7eicj**qnDCbv$R$`;8Bswb2Vz zeStGfhL&v-kb zE+vinpTgqbU&Pr$|APty|2=3S7HBCTu0H25r?1*h6wH^(HEG3;VGgd)73n*POPU2v zn)^(zShm2KOTjz_*J_|1Tt8m%h~@QB;?bC3>;`B#NWfFHb!kGI-=ie3cFHYGdGV?D zD+2=ylvmM>;riop%Nslzi#N#_dJq>fHc_77QTpjX34AtLQ}d>ysjzs?nb;EKM_RA% zZth((?Z`dkKs!@>pO|ckJKA#vExK{cjM5<=$UfMk?TL~%WZ9J&AZedpmN76GOO1}MI%Mo7bT6eH; zRGzlYJmv(&$qM6d?H+3DN1p(VS0pn-v4%0;9dS1U#h1dUvRk$z*-el)JvVvk&7*}b zX9mjs$%gKcqRrRbs`F_=U!xtWIPhVebc(U!i$sbO9mTN9vTHM)&rQnenJ7%A27%wX zAi@lyA$nb`##eK5g>5xA#=snzCiG%?$g**9O|-Imr=~2~CzR6CdHYFYO#+8WWd%cG zZb+x>k(+kz$Ifm&#MJeo0aq|M_}i+dpz!m>FcQfj(wE?H$X=iVHibN8(2iBWrHkgCD575|~7N6^CnbhpDu+?(fxl6uGxxG@_Z@1F9~R5BKu zFbA})xLRkJmc>6_ob@mQ%51|@x5+4yA~Q)1>^$B^^g57{Z-Y9fkyYk(@Lu=z2FO8% zd65{>!2)=_ZR6%KY};v*ION|U8_(Wz*7Fmx;Yg(7ZBnFVb0PKW7fL{-nzX(4nc2+M|7}sbhqVzcGE_{!&f5k(pbHMK z4A5EEhy%%I)iNvqrTBj;nE0IbPtHZ?7)q$5;9sptS z^+&T*Ryz3(*=qsW1smBra$;pRH{@C}t6cYl*)<#5bd0k(2r`t>YX|xD(bYN_O=N~P zxi#Me8ww+rhAn~zm5}Q1#DmAU=fc5lXPKdB3$m9ihyPCS|C^TYaWE>-LwwMXd*57; zvK}-g$GBcMkAx;#>+}&6=avSYC6dhQ1BCZ&7>mqdu>rn;7m;1T*kl9flKIt_wh7m8 z1VrUY0nZ-+8)PsEPYy_pv;*hM)C{dx8<=RNp|WG6(@#&>$KICUPu@!(t~I>^?bNq} z)iQ<9jdb68VzU?B-X$}d};SD)(F2c+E2hBYg+i%4Troavh$hgw+w?6ro!!_`XmulAAW zcgze(cFeA6Pf9Avn*R#xbI4?GH0HTO$_soM5RSV-oSaEsn&&K_wJ1xH?4dJ|ibS7Z z>JyvRaexRzzYRZJrtog4Z@O3_e@%@LB$?o{>9skAv23l-nYIYNx8X5%2{>AKK3Hai zvUom}wPGLNfj;R*HTuV_=@B$Cm*RNXw9(t?rTs%m&=1%!yUP{LZM~60)>9^^eU(o5 zIZJQ9k(&)8imsuP@EXS>_Vliy(ZWjlFkkA&+K0DyvCGoJRz@STp7P2!$LF_3+ud^D zJ3B0OxjtO%t#C1YT&E7KM}32mZhNHyPSaDekd@n=J?zbM5%Wj*tD03$=7VbrJr&6J zDbS&O#)ckSpV!r4`dxbs#vMH9KXout`>vt8bf(-|l+6kUZ6p{D 
z(MoozkZ~ETd`Fgi0F@ZF!*j^H0aa1J6|`Yioz#rcQj?|jbM?#T3RVsQ`4ZXMTHTQv zT>!A5`A4k8wyo?4)rFC-T9&w4w$?p7+rIAMb!g4d!^NPnc#*=w+;`avSvl<3t)MWI z^O7lsbr3}Rlcgai4WOJPqI4e0w6MiZ3}W=+liuDX3y>^JDGolN|9*!)nfzRYDf^DK zov+Z2yyZu>Zn1!wg3DVrRo&lmJfNhs(B*R!JKIm5#lCVGZ05C+I%l$oIQtj3r#{a& z-pKX@qOA72mL?;~EVtv&r1fW?wGaBhy)N`D%nGo=s2FaW#D`bov)&%+$(-3)R`Xfl zQ;vTIbE-3B(@Vxj9^fHhFKtN$|KVq(?kwUPc@5Wfg+!e!vy0GiTDrU^XEjRCRleHS z#iw*i1AKdUEubiB$|>ON*k{q?iv1t=Bg>R{bi%e@%SCBA5M=%exuQlK%23)8t?9dE z(prSowRdYK=fPMyD5KAAvkoIiXAF0{?#_BaDq12f$!rt{b{C=6-X1V0w^dY4z#icn zNOL*g#$iW6O|UQSu07s+zL+f*Xe=_yixmbbUEj@qg)h_1wvzaSQu5K2`=MT_iiH%M zqg18-OEzLGl@qB2JD-uPnJWGoD|Hz20L;1EG7i0{BS^?B%!kjRi78fX>73qd(A;0d zCcYi_<9}pPiwb_+D3a$aLn!0Wf80j_c@(C`#^n$8p3*qcBPU`&rH+t?f8WKxV!rj@ zm)Lr=fZ$Qp_he7hD|v>B%+$=UI9N0#Bn#1tVnP;3P)NR+RC#q&(ywplt5j_9@< zh|D~Yr!vR6k+)j^nVEB=s6^qs_nG+AXl!8wnxgF54kP%wzd`+1QbF7@UUL}4X0Gng zIn%^J$mnRC9_~bIc^ttln@I!X$)nk|5)D;I@d`oGg3_w6qLtu(to}ISf_pee|Ab9_sESitGmGT9}`EBna zJesTap;s0h*sM&^HJ+C;$lz!ya-Pt}vC>{f;IgYF3XHa z{B_>0qchu<^C@3Y$1>ATJd;Is=TXsm4UdL;VC!)DItly&)}nb&7_be*7c3=k3ExH7BaDH@||O;BBi@Mn!`T(rezS+pX=PW1X3 zDH|0DZ_Y;Up#5Oy)-H|9;LX`BjMjk>-XejWm%hp8{zXEce;sVTHO(d0AFN+UMjA*O zxRJFs_Zm;|YL<5T9d~=9r2j>@X`Xa>rN?pkV5GAEOX>Ach@$_+5D(AERe%RkXkr6cPoJB7B+s{ zoL9Uj`$?sx1%JD+FQ>#5IzU*?bsttyLrtDmID?(np3wCx#;F-s3+*J(vQd;1Kl{9e zdGX|scYQ#;cs!W`QVd$qw6O#|O4VO(7njh~w%9f8MIU0z1lX>O{D<(2+O5zwBPEvE zHXw@2HC_X9+mm~&RBiZruY$F(gf=%j8fCHz&IZ07XsEZPOQU_N`;Pv@`J0%Im`C=n zn6~I&h{sZI#mU>#wDq5LnxQ9@gvWkZNSb}WeU-fsCzYU9K>VV(^oYolZL>|wie<)lu^2am3=LiV%g4sn4g9UYf$3De~|Y ze6zlu2A)wUdA%cnsiEZ+Mm2*ilb)PES0*6H4gPz zmc8nwkxdCaC@rMacHv%@Rg(>&S{x(5FJYf3TqY?uw-jMLqAg!s7FW$pbfVHWrNyuh zWFb(zr$c+qNf)Y{T27yD6Nvvr#S%@>S|aye3Yt}}iR@G`p+YiZ4Fy;NiA z6_e)~?-6w$&$9s+#T4 z9bWNu1KALX-IEn}&(h4hmYyi@JR~t2H7QJHlmOFxoa)EbX^KbN;*S)m+SIJkvWwx- zV}8%KHT%>nW!u4VF6%fwP)L!(+O|etZ2J(RdRc6euIAul+Q3)(d(`f+DBHp9NB#z1 z`Xxq-yi!E>$URkXTJrr7^C#=NxwR&W*N$0P*K1ZCRE3%7i147a z!{cfKpM!_>`n^#M!;_dALi)Ci-KwIv90n#SK4 
z{1htkOcsW^V>}|=4R;%+xpiWScIGG+te)k9)5fq@JZPZ$qaEROQ!0&&@%Oe#ZYW+XysSY)pa75t*lcdVF2DSJF@958{yS4AWWx7xo7p2!Ucp zgVf6%dk`!dob^~cNq{7CG_1)8dW;QVJ~d}&OWDv0vkCI8tJM*kiEj*+VS)4aihx%t z@+Zy=Z00xWriwGy?tMSCYCY!^!Hz0wOs>V(Ti$z@O8R!2FW{ipZ3wx%6Qw3nIHTIX z35oJE)MU}P$~W377E!7NY(!W`OcJN6U=0NNN3v#jIe}tew4n}u@C9%0?nF!q(Wc}k z4_FUmrs!I>@2~&CHcoNnxI;E`G^w3UH!OZ?+&5*9IQY8Tlk zpckWB(Y&)!M)J;dDH!Tqat*gw+j9m3kq*tm*@nG5aIIxbsPDd=<{!=XgN*kf%AuIK z`TGPNb4ycQU*pfWV6}|-pX(-WPejje+-&bIPbHeV<(;kOrrLk43kC%}LPFj$8>o3K zUi0J2spEpx&D$_L1t`>IO4i$bX;4xO_op)_8hce|X?FvHH%7a=qxTgAjT@9Cv{9Y$ zRYPhe1Vta|L~?VU|0ACi zYYRKhT$?<7|4NgWxT#bRm=e02uVuN}PAd^)1ph`Sl)|QnCPx(ZHhejWkFHB;SE#RD?DcAbGmcAj2AM}+#9v&BJ_k3&Hd+VB zId&}ufa{WbWlgQ9nuSD#)7&qlT$ah%t~x4_1pQd?znLntFp6Yr{Y=1bHSp~~hUSrL zHE-PYx&vX7a<`jnG{7^J5I#yn_S%gyJ3Lh!T<-FVev^9$ccp1qL`e+y6Uxr2JvXCY zIV|v9k8XJ^%8~4{(b-i+KzEm(s_R5lL197kZQADlo?z7gKZrrGYgCLk%Fj^wg2w zP-bt+=F2TD^Y%j!{ z?NRk^!|eF-`kJ<*nSCr){Od&PiNj(IR#H=x!5!YdC0Md4MI;R)W|^{+pOQ$Z{8|X> z=SlYnQQ92V{b|c%hy`&L4i76^sL%pmXsCHQHH6qs3m3pZwtie&Z9Me~O}!#&87z+W zlEQF0Si6UiUHjC8xG>#gXrQHvk}Wr~^(~cN<72^{#VE)MuX}s_2Lg)@cs=JX#+9sQ zB=&-9&~3as8LF)E67O?m1yLRapf-ng-6!w|i{9PEuPYd`&Ms_>%r5il_+HcBet$@H z`w@h^qo^|Mo*f1Uu?SZZOrhe*<|5Zx)bWPfyo?Ug`SV%dvatl zuN%o3UE!>2*jF(QW=GOrhN9wyj1rId8MB_$a=Ftyy98o)M0N4>5SF#)i|g-vw^*DX zu%pR0EO*juaV|`A5U^1s1`Ab(nC)?3UCm-*+OFALPSb~v*g;!4&1yBI{ET;(y?&Zt zFXr0d5PtFd_OVG)zEF|*vVu9jGRWTCA*6^c`p)5wd&8H#wl)&;wqtD&k(Bt!K_cXX zXIVPq1<>D+LUR!JvDYv~;Wak+r!*8z2}$_y)_wN;Qj@vQd7*6_kxgW-^}LZkcsx^n z8}|gcM$gN3s`cw^}?{#OpP~8vVc| z9UICAWTD&-&IPu>I(njxWaJB=+$q&O>q8@E*D}8>=v>JxWR@_;0nPH%Cwa=p z4!PFKkF8dYpYcK<4KSivY0u6v|H9sjIVqPq*MAP_IOIEzsEIrgdq;OU69!&btVVZ^DMdu+6KxQ0N$jc5_;mIG9O`@Xe8CByB6(}W?6D+%OdS$GD+)Z2PwNJHZIAq_ z+SUqpKeMZN)En)qTGV-{v(Lp|Ge$uk%qxS>5QCK{lHP{2VCWb(*{DzcdW>OM7-D@gQb6)b zO~t{+{)X8UdSf=KwBCnR*4t+*ntmS!f`xHE4cr)YG|9~n&~C|m%Nlm#gJnuIX2)E$ zP`Q5#-jOmD0V+gDi)l9+#W@_JYYq#IE=ca3T$L$y;R!FE-4r)@auD|$`na!?GhMZa zeBi^Dzj1H||K5-vM+AF#U;_yrj!7a-?dCp+K=>WAkvn(;rV+kCJ%PDAOxv4tS8ZmC 
z({!*sGfen$UMe2SYPqZqaU}q(lEjv0ZG3X|_cvkH4ZJEPm{%9qaiNU8^1x~x5h6N4 zD6qvJz)9xZ{1dx;*6j@Q$PHzvLEJlgN9KZ_)*2a$cFVBCP-_8b@=s_Vg*WmZAtA&* z_6(Q*rO}doE+^Mfj^T*Rr|y&$*p$6%c2-*jyWB9l&*^BXXx`XI?poRG1B11(ii(YM zDeEvP&7A5J_H1R(yo-i9UwROM4YS+?G~s?D{3tQ!URK0D*L-L3VBB_4qAp+f!iDX1 z$;z@t;qwyum(b$Hce0S6?5p z5g2x@_n+9^CvGdg*<+QnWtq==1)6o5V1S&0_gesCtv9AyE|HW|U0{Pu# zF>%%kS}`Wx#fRkqMp$OdDxQ))^6hVi!kUB@_fBzwaK=!joAGf#AVOW36`S zFsnxX4K%;xWH%>XeHS|YBdU%@2C8!uxjpQps9rq8II3(mBzQSC?G10V2N{qpI1tSY zy-EFIf-%?CveGc0FtK znOl+oLu5)weBUvjs5S#ubmQ6gav*rf}@JXyFWO0?c|N?kK1~s5h@N%^oQ)AOKg$nn)a3M zdv*HQKV%p>``_6yg_Tj(F2$@&5Ndl4)Ev9s8*R);5GU!5xP@mQ63VOquqOZl1Mnq) zi;_{B3rqg@ADpFz*quARuoBT`-_DDwz^C@wmo>w#rQiBO!McoufQ4iqLx-x~TEicdrN#x&BhK8n*PAAf-EJ z;ti5PS)x^4l{&`VM{G zrJ56O?EeEJ{tMQT;~=ovq)j>9x-J+c z_&id42q=zel1r5Uf=aes!|{=bZ)E|C`3OQOHb%&03&-I9uVAg1qvC+3*J3HJ=#hb? ze3%bt=pzatiQ@g%dHHT^uU8zA--)um2T@5%H-TWpz+Gc_16e;_`$Mp7FpG6q%5Nv|TVb2Jw zxwdq3_``hZ_hD_``ut569pIWgw1x8yP2uYQSd2ZR?fATd!oM-y%aa+aQF2&T!Ol*+NLO z*Ec(^1ooFH8A*?o5}LY@oN6*W>%oo(p!JVI@?hO*8<*@>s?`t|IaX#UyJn`Q_< zmtky3_@(YCbxp)0;;GI|hp{`J_i(}2AN<(5*SvIM*y5t_@IDhlWbBklo$}EDU3pyV z=ysJ+J91vBKZ)vgbm#7Meq64Rs>t>v68vUP|Cf3gk^`l{E8ZL;JunHCLpO7`>X#CwCltw+s*GWukDSUAhgqsp8p1Ot(VXw)2DNThAF1i=cW$R|Ho>mX&9-*ZYd4vC=CxIeekIVg8J)8BLel9uB%gUpL;B&hC( z!mJJl*;F`;6y;XSjIZY1`>$!lEflY|I2+Nh^$&U#P`UZ-YXaCHN8IyX{ooj*UH%&n z68-C6?x`YpK2PRUm%3}9)1kIh4xI&*>NLzc_O%GYGXs!sn_o`sw@uly7!tgGIclkf z8RiJj?U*=0cOR~)ohzcBVv7Iroe2e$TxP-JzlPY#nUodvr-pQ(Tt*8Czh-L;L)3nO z$|Y4VrPmuuNQ746E_H{1CG%8-F)emA1|iDtfGM4V!5&Z_Qd$AkMH z9$8D0(~=eu3$(>R%+hzQ+Tl!F+t8l}?{C8;Z+=D``Um!XeB_&$p4>Sh>W$*a6>vNS z6ChdIhKWY;>6v$mD^F!13(gO`Kj1_bE}cwlmo5FC`sG==NMZFmm2vWMq|?i#+dGxw z2jX%55^p4$I_Hl%Dsk_VLRQMUyZL!tTd|h$cE_w?J-479cIXbp1kbk4oVlWyj+2}1 zkG1I_dbL3N`ea$3p|E4nK{kke^Po<4zsesNh1Dw z4+8=YCJh-6(Pm!K%C8M|_yz@=2UXXU4%l;;X^*-1a$vUw6SMJa=SIwS-G`{K3q3Rf za`Mz<#pi#ViOK;JkbNnWN-8z7Fyy|euR!$b?NbOhYZ~Py8%JcOh3zJw}C`AFKsu-#u(nLx`q?d?@h%^AMEcD)cm0qMJ^e#2D 
z1VYGN!F|r&_xsPe|977!PhdgBx#pZ}zHN;0mPtl;Ir~1y1HyeQuj`WX{msOdu@YW`-;b#=&NHq_IO^}6jDIefod9e#XQH>unYydIAgOiiqaQ)hIkr<86VzCT)I znQ(J)>~3IXw^i6gaUI9+x{|1QuO5w6E6AESalG587q!1iocMtw{Aa_TG#op{!CS2E zFV3ll!8OW%Rs8Oo+4{mo74xlh3VROJ&BCwQ4D%{27!^Ao`+?64k{L4|jl)l7 zukLi+Ke;%&p4r4D%<&whp$Nc$Cm+F;nBXM{MOuakM6&M3iXk#KotH3f8cuW#xh0oj&0D^5f(cpa8 z@7thdO7D>$E!~w=CWJldnuFtO4L^OJ`1h4-4|q}+lg7xY`Wy$o{EgZ3xlX9-?@N1` z{b3woJL%+J?92mE{Q^~ud3U^iap%AO5Ol{hvHI=nf8qvdgG(#KI=c;R*VqLVOA5{Fh=?oTKllk?r^vjuxQCOQlmS_ED#Qcc?vMNU|`uzz7F z>w&R+Qj0!@-`EX4qtX@|+EWw3lDLoFw1AdHd(32>Eq8h`q#v?f3r=Ybm}c(A;h$v= zl~oXahyh76=a6(4!1pDq78%2GhA*gG^Y^f@BI6U)n{L-E)!DVGlRhlowle2ku{Y;kHmbOPYvfl`Mnrp-yY#T@{y%FBK){EANfcaqs)Kpco>5m zbLvH`Md?q@WL39`-mYnBCe_hhJJ3HDk=6K2GU}?s@~67%R*G>iDN=2OGO0d=4_Kzs z*&501W7bj$`TV1OsswT1v`(k0pN`kwoS*;(m-o}_CgL8l+VAey*(pJsvo{)s zZ5ke&IPMT1jyuII+%UjaH)ctc@2B59KeWawW_4nGX(F2MJ(?Q;+rKAbs7t(_#3q*U zYNY2^zDoSpEMQ#Oemb*J8}8l@0jxorL$%Pr5fLXVY|o}rs~{C4{U(8Uvt}N*J8G`n zIj9Xutb3v|mDfF}vuz!2px+WlUVVAuGHs?HvFHFf37GHHehI!VdcstDgolE@jU*L|iws+^ z?*aOl(7S7I0rQLU>63a*{^$(NYYLqysW+kD^!0&G*M%;s#<55zHHqe^mcy6{-LAef z8Hc$xgjKjq-v|(_-(E-ai+Lv)Z17KmrbAJo|f`KBY!%??a>!Oewk!smaH- zb=T(Jn3S#PwCsw_2JH+ymbk9i&oZAt4423Lgv&vtOIh8O`sl;apZ`Ala}@2!XY!V1 zD%K9^x+&zGr)z5ckyE1n_lrkmOJw%nHvTFPH}yhPgVVqxr080p8_*TK1eAJQ(KS;( zVa9uucBW-gGeU$kruO%(Fg#&gNBx@vy_rEL19L`sF){*}Lo-OIeZZY*oV zScb>4@9M36!Qah?7DvUQ^Uc#j*GX>tbfWTWWeaxPBS?gf#B%7cUe1gfyMBZfbBE%D zMu`>1MrC3=VbScckBQPcLzTY(jE9MQMi`t!m@{DnQn<=e&P4$+DoEm&!^$}TlxRCX z&;V=!v)u!VHwx^+XZ#z11dt)To%z5};sfK%3U7HQZ^b!uLOqS-y!i!4TR`h}m%p~v z&Y9T-Ew?O#^%pOoG8Lg3`WcVC9-N_(uk=H}@jP!MbGN2h7Mq%w#d^g5ENe4YEihpd zkL)G#L`xG+@&x4P+jNvr2Rlb{0_7lG!w`q1r-pm7?pXmNW+fSF(rdH*?K%aNadE>b?8ilt<1H71BUp`p8 z0@Q~9A-OcPc@h{lsMHPpu|+q z+D}-PZOnax%C5qhhRH-UFm3j#z(k|)yrU)O?^)lAk~+!r%C$-P%D1@R9d(}6k$qa# z4=Eh0F{1W+2)C|5HEpv&MFfA|F%#meJf)sWT&9}@7IcJi6yh=mIn#D5o+tS8N#%k^ zAiezH{Q^zPd0pxfg9|_ zt(7ML^rf4Ma@F>))vhF#OBT3VksG*+7kGMK(FUry9Ima946#k2M8~*zSDg}dZ63kG 
z+@I=q-18wRF_G;u@VUem23eAOX{}KE)Q0{Lj=KWBroK~-cPbe|*ZD+FnO_kcwEz}{ zt71<$)%xCu;YZ=i{bd=?3U}icmIaEPPdQ7mB{$qoqK|oU!;c zMru_|iRns}CbCz6MtBN8@QTLf!lPBu*|G%OFj|Xm^!wPM+RpBW@yb(nk(&NxS-2`J z`gv=wNpxy6w8;Mg&k?D=A>LfUbi1DG#MOl(^ zCyPEwk1A$sXf=(1+ggwyd$?C>-Ah%sx1wk|iMC1EcZYn3rl#sZoZ&G5nKnrXC@aSE zQjc)3P&@2cQ(9@c&E4ffqp`mV%m=uFkW#@fQZEs>EMYmVymkV|>yImxTBDeHlBq8Q ztsgY_>hN9x6}(v=6}zQ+WlfKNO(niYwD-f-tsfR)1FM-!_a1Jv+@GAo#8Meax%gfJ z``U)LJS9NYyz@A;vEH9vcmmo6KpHoSr3BqDp^^$)8ALvXM*Jc;o-q*j@7SXM%dqH^At=AD5UvmkqRr&@e z??uQ^yj`V!Y*_y!HFU}6Q{Bb7{g^LOwMG?E?a}3<<-YJvHeu}ZwCk}4&EiveZy1q% zL}~%BMgDc6xOuaMx(~L9$!mrXmf5yA`GI?<4RiRAMy%WwjNH44X@P$CcJ{iXhP;{6 zLmx65Te3kUfB@b%-XqZM1u^TzU80jzosrt~7j2Wfgc|p)fOg&)v<17-y>x{Egz`O+ zFhkRy2tSxIKOmta7*}PMw(tpQ6Lu&AGN5U?3mcVE?y3b$fj*tg&QQ)Z+Hc92_|9f% zli>dBv%0;hu)?IA%nhE$3i&aUpMo46CNVMS-K_u(jAGrIVQ))cEkwayL;)S8+_g2? z(YA=MG19l(&GbFYw8`q?X*(xq>UOJP6}nK6bWql}+xhcWb5E(K&`lL$q`K#8nXSsQp9CTuVXLqkm?UR_MxE z?{AaqoutEHTqTwDX%cqJhn@;vbEDHE(;#vc?1_G$8o81Xrxtb)`_Z7Pmhua8I4 zJl%yJ{kb2)XM)1=u_uyyTyRNbT?M0+*MtA^S;CN7YcTOS0DZYm9!bRD>S4O=Ii^AP z20(e_=*HKXgh~y%*TU6IY)~sx%8uKTqE_P3$mbm!NWsascfHRpi#S@UYBPlMETjV{od;;CPq_5!d9-a4(=3$HA6rRN31SbU{jrE1m`3>~jjO|q) zAG)Z%xFtS7{ieY!7&tMkAz`XWBl_sc=+hKy1w|T>{`(|l(T;pH1FCChHz9Hw?gkP$&}yDQUsSKQ zxlOBaUqV>4np>VKyI^T6WK3ueiiGCPOo@wGgAT#G$5hK2)(C4rwG zONUs3oMXvTfvMZHZ42aSoh zi%VG>L`HX>;!3;raQO$TN@RohE4J9g#4_$c?y^bNYkdEBtT@ z9`MY0wa_ol+h`sUjdx4iham#H4`n-%(M)Uxw-loNCI->%be-LhEj9c_(G7lND7V;1I|!~NoU6k z1?%qNHVh?p|E2`|L**$8mu+b9`{Y~N^Ze2757f_yxE9h=m^j|N*e^Z zE=yua@Cr1_{@OlaO3FUPJ|kvtKhu(tcviXX@Ta(|F5I$gfXn`gp$$6jW0mjRx(^Gk zQYw|Nb9Qw8qpkn-eKj+)e5vfAi<~1_c-^bXEbVO!$Oj^W5KgV*9p%!e>P|yaae}Mw zYAf=w>Ap-6&AcJhuf`o)&h4zlpN!XSzwPc;9W0m1;sFCC1!q|Z=UNQkVR8>HOSA<{ z+jV#d6Ek1EpeTRm-3Jr1@UwPsxuzbiN^de5?9IymzsgsqpWm;VR8=QNfPWymBjTgJ zxD_dyJkmqR*xu8JuZdi9{#Ss<1`y|0qfawIYJh)qnuz0x0Ma9Ocf^bUsEa*$!%BMV z74XqFxL!sKdniC|7FY^Gko(N$$$(HsAFvgDwz;!=WiD?0_xS}{-j!*bWOUgU@=Nd( zcZB3emBusMcYt8gFpq)YwjJid$oawEm2R;uHQctYz2MNSQ}**^Eo!Ga4eGZR`Y26` 
z_>3sF5;){-OYp=4-RNE%AM0v6_}K0JaZP^370EdQ>P8o*ja}-yYZimT2(>x9uFv#Y z*f2!zAjInnR)&z661p9$qI71V&h}U`o$BA5;UV6r477wv+yvl?{*zQ&1%P}0IKHPe zdx+{i@4P+xX8@R(v{TY$WdTOlB1`-|7d?Mc#V`^hEbQ z-eIkCcD|DQ*@I2UkIAmiL4=&*0cpn;z06t{qQ-gw9}1H6P`54Uj$7(0iFx=$|ItIU zz_P98{G)|cuL+ZjD!Wd}8LCdjcT=<&s$)K97-gW}=t)TOZa}WoYsCy4SKuX%Oc5D( zmbDF1N3+c~Dy&z`i#_oqx0*{R9+ZV*kK9BQ_Ip}+)L2`N)=0BJpQjcd9VajePoeP) z`D&Zol5y+xsF2UkHZ`fUhQ2cZ=~tI9P1_DD%I~4hVXn z>4T6ABeW^ii%khb)D6UxQFIUU?W%i;y^H=oazCyx#lCSHdAx*2rZu& z9~b#Z)vyOGTmlr)3eh6gK)mgHu&~%Z>s$>%rLg@duiWsDpk1xICcP@txd9CoM1mb* zBbtUo{u$`;u@Wuv3QWX-TAX7ze1#8M$F(;D4*4Ma1q$|qm&BN~gMNzy5{ zC5koJmzsS)%6+c>to~yw>fnhS4ooMCQXjlv5I=pALCJlE%)nPrnxFz!&`KCMf3ztI0|+FB#L<6E0AZ>qYi z>w>aORWG&w%lFK$jL-M22@Jl9%t4qGOCxCZccCU5PV~ZPK2m)iazC9^gNDR>@WrU} zAKbaPrKq29NX(?YCmEempm=F{xX5{aQ1Ueece4`Oc7fxj7zGMbM#5iKaAW^|3t2Ow zAw^O+&$rR+C`b9xcJWjmdH%YX$I%gq^B2_jJsL#3sRwo^+b&+JO@nBDX3~l+0oX

dX5XKP0}xToPLC`rArTp4huE=?>nyInW<5Y`6ehJ_%+UFFcXaY! zo5Ebaq%OwA2oGb`6gx9hj74_#aq%EOCn)9-wO9NPdhTMRPvUausFBaolkwY+zw(QM zK^I&<^iB+F}m zGCxdRBllTeV(krgAE(HFHU@cdWelz&c3I?6w9K<79bByf>7FL`QPgsISmiknS+-00 z{dZZS1P!&0=P6!g!S1QK(02i0_B@ZT$f|OIh)z+C6o(XH|Ls{854Qo6*Ax*PhDTlB zf*K4s5ZifMD$K^$3G&?~-vB;m_U-#WH;`TIePcon^I{hijxrfIkMu))^9^ptNs(5K zlT954P)W+Mb#IHh`oY>b6}Z)2p=#ncl|93f!+GOBNejkbem+53~rnv&=+z0UcDaOupQ_ z;x(r^;!EQr%Fw|CQiYwQV_i3klj5c|AtOySz5EB<#R8*j8O&?XhkW~0ek;*3iyXXD zn<~Jr#N9RY>9_yn{SiO%kI1H2a-xBkQg{lJOa<9r=CbqsY4)3adNA{aki(ibNT-0RaQ=D8y;~)|jc7&;{S>2lGT? z^8e^*Wrg=qeBu}7w0$Vj); zIiDn$Ky3xQNb_ytr(EGWt6385vizeU=o#yrrWcqFvhB?+6%QQwg}3-z7QDouWWM^p z$6t)*tuxTnM;$&0Og%IVafd^iQY4{T;Exg#DZJ7O^U<;4*+2eE>;Nk8uN=-5_^H3c zmUlb%`onX0$VDLWw@P`%nKJI)k>~Y0&X}rH7yyuYzG;ln=5psglLwEO;`u4dI~lWz6r4P z;nKvgHIR+q9E40~9Aj$Cc<6Z7+e~&BpDD?4iY>JfUu2YcO!iFBOIx(yxMiHB{QGtC zD$eKrmc@0$`G-ZKpo(>IM3j$oIQ1(OsUQx&`Y;I9c3CjrQ?yB?o_(&_1mV!528xit ztY3+sbJ*3sJiT`wSzrs4OpEj@p)S&Tr|Pqx?6Qd%6Y@~;0eQnq-#l{ag{*-a9O^@T z`1Nyg4R&K;${E$?r}ta$-GYfrTP$X>espF#wkG9ElLX@n+U=G}4PCsBoIl7phoJOb zK2%!g`fglNR%>ocU;BGELk2a&Xfi#CxZ)x9&`-ajeKtQb;-ftV9iuyN@5s`?W|m>m z7OWeB#(d;Xs=fnYv3n9z`@!B(&mGsZ^pwnL>vF#AHh6C#9Qpz-uwJpuKu)juK8{A} znqpoc+^3ml=Tqq&m%jkZUYoR`(^f|g;@-V1BRKx{GI9LRenQnh{}GTpcexjE&K^_H z*aWO`Dx8p;_ZXovuBXt;GtNbu!zeyGS_#2A#cu+d1D|Tj`p=wTp$Y--QXx@+}*xb*!oE1$0mZ?50Pm=2|Bx!`P$ zq}iQ@TM~NBeaEF6hMR0J+UHbo$eT7kJtynew`UB<*e7Zw=07Re6mE$lx{QmN6X!>M zEv6yKYaD(}tOwJoGRxz-k#2^4r}fqCemEYmeC~#L{QRZT1YhZ!VGm zAc0k&8HL;h10&}tGX?#Y0N&gmh*ORnrmp$qv;vXQ61^QLXcXewLy9Ouj?Ew-vv3ntL?3#>PMXwmrb zIsy74s3FoEv{eCV((+YkLUM!`>@Um^@u#-2d5h&QEIw?dejETc2QNkpHi@^AaQ-vU zQ-e~~T%upj{q>p5jb|e22Um}B6+Vdh_`hag7ZmrQ6Jw5FlBtLpD5H z1FL(J{UGIy{xUGtJC{8!(!x|nm}BCyg7i6dRO}U%s)av*F?|#r?KWq+a>;(i^T$<1 z`t&58WxpEZ_>_DO$w_-Poz_fm`qE(=%DHRi{i(Q}E$YRiDTePNYV!?zW{?*YPJ*b) z<)EVP=2UUZat&%?8!3uh77g&Y%U!Gced}j=yFkw#?**3UwZIWsoWm?ZE@s9?(#5d{|on93^54`KJi;P5s}VLQ*;Em3$AM8)qY ztHSNcT@Q4j^B7@8PY%D$vJPJmO7T8sH}m?8W+>F+g5jUU&^&MGoj2|kYs1LnhC}TKJ7d9vfz=X25bNf=3iaG!i 
z=e;klw)d38q}*k?$MR|%&mb|k(c9&{X_G}2b8Q#a$6{(>BlkgC^0yI*TO55WA9k&p z2D2|}K_fZI%#N`24Fp`%H3jn`%*hXTz#X-BM5o=DUM)2*JSXQF(AX8nQo^QZ039*I zeGy(ioFKD|EM<5*4i%{4%qYF{NbJ+Za6_4q(kImL9dlYv!@UjnGg2NGp2Z#*B-+P4 z{uT8^+|r@#k}@{HYHR$Yydsb2yMoRVo&wcH8op78!<@d2j20zT$fx zA0AuZDD&vI_Z9=teXCP$e$(tM%I39`aT)B+_@6WH>ul>3dA>7!D4kLmVoF|qauD1xv7u75jyyik`h_Vh=4+^k9- z^ZuOnE|wocczv;X)tz)(C6|G~lc03n=WGCjN0&KBr&dmvzV+QFHd2n-)|q_-ZFf!U zaQX60e1zw)W%^iS3;zYxfj%Di&f+zjc~?p4>gQr(g;vbf;46CjcW34pwpUYW7*;Rb z(<;1aG65eg<6N#HrV4a(HtVZ^{UMkMn#^d0WQJVH-uMn8TqCb>dGC-_f4^oVWP4S* zmQhzv{`LOvRQTFvxo3NIn#&*W_hApo#lb`VqH7Q*$0+E!mG7Jw|FtijCUbpn!Z@$XzU`Hv{Ut(r-;LrL5fzynF9iIk3T6`~zmpg=}$S!P6x2@aijV!J0 z@ni}~0-~PxOq_=#I*mq7Q;=J05>LB*(V{!|)H)UKPEa8;xyyZ^agE21ri5~JCMk%@ zrU^LbeF@_vgVq~g)1S8J&vZ0`G}%{Y@(Dvm%vZqDG_I+u+?4uK*qv*1bu}T9#(MW` z2IgqXVJoG|y3|6wDxrOaO5b3x%+oA-h zykwuPgkh=PDgS_?l|w^l+~9Mm_+nD94Y1j-^QssF?*S@|mTb8tFNJTOIgt9>KsmDMsaEs`t==BcV_O(dUwlt=*drPs}?ST3%a=m!eJ6n zQrB$?;Y}piT5D2ga+e`rWi%is0N?&$Z#GmWb210soLO23(4?lb&_ynG6+V<3d_B*m zn7!kU*;k{f14Qt}%uz)L01doNn?AX5TpOx`-EFO}@UVLA*a-gh#9&i-Bk!tp^fl9* zp5(2WRnC;g_?X33k~)E z7|MOmQQ)q_R8A94Y<6CA+n+?2yy{bqO|0GZ^d$ew0s4Qm&?r-3nj3vBDoMjW$vzFI zY4QmBSG1QsG5fX^o|LL`&qZ0!CMNPh%Q zJCgm1)>Kqc653YA{xN;$8HG0)o?m;)g>9T=N!Z8i&Ts7py@7r#Q{0({YJSA;2HX$X z>N&8#ACZ1lVQcw`=tA*PDtw;GvR4_qMsO+j;Z$5k>WBC17g!7HmKbc3GO}~|l!gdx zQ|6xSzL>irs|u~Fy?MrGev&k$&TeCi+T+NB8m!>U@%UxTTxz|oWNl5{Wwap|m}<_< z7)^8MXy;_%;tPzl*I{LR3WzkyHVTo$FKOX=qJXt1$Edw_woWNRF$$D3av!24tV-K1 z>HZ)>^godPwI5_kXIKODX)n{cWxBEaErj<+*)QYMmF&5$J z92xQkc}Y+`9z!?Bd4dwe2179CPiAzc7c>h%9mXLhk)@!r&yf}E3e^=M+3J`r-EoW* zjbFRnE$oskEm3?{CuY}5)&m{CTkt-D+A|NJ#;*)CrGh7ZeFZTO>!yK|g;$&gSOSEs zP+G@qxE*IP!7BDK)|s*AGu8HD*a>g4?g$3tMVL~iME1YreQU!C0T|SG;ug4C6xb{Q z%u8u&11hCNtrwHw{R_xf{SOgf#^PlU!&}?SynAwO*9^{NKsn~2a&5?25rVwvM>>Fa z_-9T$>MKdG8@+N%`!K(sBn=2^qL&ZIJA&Y7rgC$y;%aGwC+Zy)v8JQ86f<9PB~B>y zbKLvw1v=M&7R^L=O5~^|%Z@}6mSW0YGWjM7ipDz8Q`T$o1 z!CZ5T@@tO5ytepytrxRm<3Ls^pkBQ%Ibh9-I*LT=?ggF2cwR~U{=!LH-t!mR`1v4L z%w)H( 
z!C%R**b;d5w-EA=PgF}!bPcwlI>cC%6#t%RBu$|tmuiE$!pKMfXpA_V&)AJj32Fgj z61u9i|PY>`3t3Ug+~I zEEChLdJc;*LpZ_(P=zcRQ?WcAh=)%_b(5%r1rOFl?4O(o$4SA<0xqfSXt{-v&FnF5 zZ$l|k4?`Nur|q_vGy@*=y&hqtt_CkHrPBym-3>$(?HG1vjBh z+_7E>NSIZL0^()K0hW_aMSBDCFoDpS%`!xMhwbrR|c z!0|~@R@KiXJxe2&9Vd4WP1?hT zXBKN4R%#lT#86dEO_8m;T1iSk+eKWQaPK0uKiU>@tGKVi`-EjaEI99-70rz0J_|Xs zM>SiZ>ekGPflGdO(wZr5^TnCX)Dv?9fBX8$nNBvcq_Nli--bdqaksN@IOUJW-iAQ0 zoi0(&D%?I_IXkz#(rtF-#w|`91fHmHNrf0?U)`x+0nQQf4q6Q7VTJgT7`iXk^}GEp zkQx5paccoeX{)GYjhSQL(A1QP<$XVh7yOQ8;zGN2-oIJ8!fawJtjCCn>u&T!6eTSN zz9FPQFwt`GG-XNsKC5%mlWBb$k1>Dcziq!nXS|RSL_Dc$WP1=4#u99l~%TzREUVZgfktTyG3{h*_Yr7#G z8tTDJ{$0kDQ|QmFTJWj4FD8W*hb*SZbBOztT~oxl_%A8p=VBv#^frH*P7x*6Zeew8 za>UEqDL-AM%&be6avS(x(y`_a_B^YmOf)r)o})c}ivOK%r;HpChI!l3A&y>}Du3&C z&ZUD%>fbrgoA2jd z=uaHz_qXZK%&zU7yjitquIpziztm%LZM{UZeGauMF@1)_udrcBtU*JmXW1le5% zZ5{cyPLYGYID`C^I~6|u%R@X>+x&Zgz@=3}-r&*r{=SkS@YhyM*bea@yYWzmu3YK(xfKT@>v#S=LIH&b-fYI1^Onc# z`Kz~3Us(C0s%%1y(u57L&X^4~5lm0-Pc<;TTQSb?!cE!vsNCvoO5C;2@P3tO2+2&W zi_lFpgBJTLY@@jFPVw1wd8(Q<7$Ws3y;Y@-wT}+vbN*|P+}&023uN-ofdykRJss+d z^YPeHLWGygY)sJr)iu-ZNdly<{prNPa@*fR+=U=1(b{gYP11QBHw$<{|4=wmp#`^`mkvzE(oKla2#yjRDC!D1~G@DPy&5?d!l+AWW4+XUn>PE2? zrk5vnEGyI}lm*8y?2FvJ(d$s$9)AKXcpliJ?_`dEZgK@^X z0neG8Q6U8^KzSh~w2s7}l=KA3o+nN)hTF}ahUd&V#YA^ZM14Kkkn~Zlwc_7Rkz%D% z-f!x>-4CyveW>((rR6JYaSNWD5vNCf9A(L760dhB+k8JWpSOOcnPucGh$?7fCcjcM z5MFvSSVui*ZraIB@z1f?ADT{5m%2^aHODJXsmEQTF3C+ zmsZFn=1&Q!__$t@y?ksM7XYebI(}og>C%=8zB{1k$LdiRZ1=&zY>kCzM*t=oWm=eGS1OFqQ`j|lGYkkXqc2=a|Y7h#3w zJ*y;%00aR?s=9Jty#NbS@SUwwPs{hIw7@ZlRF`2erc~rJU0ae;mGK52EOvr%TT^Jr zY1UcKdjvBO0e}%w3I_&W*>smJL^G~Bfx0ICMeco7zqUYKF_h9g&6(I(KP4BnmT1Sv z8nqgo{pjB{umAB)al@m;v7>HJ$YI7yIir7^%yG9zBVRW(Z`scDsGYyT9d^xhfo!pR+Ci0yF7Khj<-V$nt+QNF~=RcJgrCz84FC zIFRHt6+ppCjrv)t_&Jhl%V%GaMl$j1uR*g=(ry1GvJ=x?8ao!_WZa<9%5CIMjJI+| zOoB+iPcB8Nt$I3%$BL0gjn_vI=|}2-=R-5zoa{}r2#HOrI&Dgh>du*BP6?^PfDdC~ zFv?KG(;T&Ju{&Zx3OXHXCyX(S#}$bN<@dl4$e_68EQ?WY0mU$nR zxk^lETAnGixTs~OkAB%l8ztWG1#^4*iw*c4?Q;=bP%v#B&RipD%<%>L#Xfp+ez(M? 
zDw8*ah})}upF<+9;O(k7#*;{VNN=J=P~duY=v7}`=Z$0n|*zwo+0OG z-l~vytF!rRk!y5oVy9m&3;9x>7ylpfJQU$I!)>kgu2~DO>K8&Arc#Db_>&sp5Y9c) z@_ON8+dBt)Ceyug_&7PVInCS)T}c|i-c{bc3dXj2 zG3$-p6k&X;iJ%TDoYru+YwCXhw<3jY8qai>n8X}FA=C2!#sI21zX5brvtvu|eUGqqZIJB+og5)1wRJxTB(q22+uRhle}G z-F;nvm7Oofj(O+~2B^psyftt`Qhcye3jdJ!+weyjyeP+o?dQzAF*K~L76n~dKGDqK?P59ds>WdLFfoWl@I056+A`|LHxG{6I)uhUl%y2CL8{=nH!f8E|N&Y>Il)kAt-oqGK=}f|IM8?al_XsDMfr zqS(a4_ZO`?y%#=ix?t8c?4*}g@<^tS(0rDBu?Lkp9~5U=*Va6?jc5ad%1b{$FdtyB z`oI3$VDIj!;ciw`G-&D8*{X#`4!irQ27Ga7KIJ=Q8GvNl1ySQS7?n1L{FeCy)G$G% zpnR=i?p?{&qHn@qsf~XOid{Qu>Zdrd_zOL4=0=VvTyo2EH5VX6`Fu0r!8HaiTK-lK7cvrS`l`DE3~?tI^sBY0=V?u9>oLc-ZMmr>;Qxd&QcQIcLQ z7N6ZO8tjv~!3;l-K5J9Lum=<=HN|w*O;OPLdXp#BOxvhsd(JnsD6K?rhw`d@vhG(k zOMGTAVTGoC;j+G!8hAPaKd9!17itJ$1PKe=z7~&XB2d{QrPa$mLy}S=A%eA2m_N{v z*cjy1LCRA#Qf5IpnkN8S=F$i&+iY)=VZX>1CQ(v9SvrW?)6dekOLEArqDc4pX!_>! z)k`W=1EaD-05|ot3zm>Lp9_CsN)fS1*zeJIHf7S^5B}9PESD<)r!dxn7##&@z+tyO zcdN7dtZ^%O?Eid?G0r`@^7&~5u}PYax{W*SuJ@W_ny?ryGKW9zx1|+cZ>PjWO=Lf! z$z{o8WkO;PgXM08tXY!p%$YRiG|awdZOcBN4Lu0A%J0t88IMZ(S4*zMRB<&zY6Wm? zKZO5TpUz(psY2?~>y?+w+uxCEOi;XX?iuB`u4b!A?E-0X%XnX#7n75V)-$2wvijmz|wezq}TLrIGdpRUnm?{g*nb>{I4G4|sWX(J8! zI{8pLSnIeE_ME4^Jx<#X{-YC5EK=9`*G80FIzi@dpX};6Q?N?KAfnW({Ug6gXy1J* zB_*Z!{l%RjC1>pFLwbtGJo@=-&LDc|CSiDL(bUZ zQIJX$Kn`BWf<4(RP9*o42cA}x&c-b@H+Mno_HcD#^ymw;JKvInvsj*l>ioA7TAKp6wZTsD6Ub#iP|OJy8I;bV z0J0XoN1Vq??I1u!fO@z13c!tT>ThD^0W&vTH9p6UC=&#wC=`a62QND=y`-jg=4!K? zXB&zUxz&{+e+S1O2ebrzgZf~Y(>2O^i+F6y?UYeW^t>@uUgd6gqrgsRNjSdLdUt5M zRQLZd_7*^GMqRsTaJN!|dx28iDV7#Wp=c?kxLa|z;>C*>En2)d6!!qd-6cVTI~3RR z((nJzo&Vf3ckWCEGMP7dcUJaZd+oKJ^*kuIPRcN}CI2*9oAQneoaP~?iSs8uzF@9G z#H43P-(cIGLi6*}F6imFzW$%8)?YFQNL%3^KH@))H@!YIaiPxVD(eSEga<$qsil&W z50>`YKA~Kmi$e!I5iS-F8r$B#Dh{u1V*cjr7acmhww2?owFJ~XJ2rdd?g*E9R~Ozq z!eFM!{-r`<-q73nG4*7$?nk}wZtCTv32St%(^zcXxQuQ#57qUEd!}-_ zg8?!GMWtf|T! 
zKkq+O|8MQRGzn{wA^3;>*iZl^74A>q|VQqbZN#p#`NU_ z9FcU^IOlzzh&^`$`kwuhjhVR$Q#Xry!rX*MOsjK|t*7^=rwkbg!wtpPhs)kT`RYC^ zrFta%yu}2=Mt$hNN3m~PIquHHQRQsIQ_!v*mh^bo=za#k_km*hAAu!N=VzFfqV@Pv z_#)x+1Jb=hmaxk!mV4Z>RWT>@#pB@%gD)Z%Y2Qhlo);kwGbq}d+k9+TF<~)QTuIqp zR%xCCFP9NZ5$Rgt#}RE6#)e;oB{+}B4ZHOtTztnhxsKo2efpb}*R}hX7=5I*DGZSm zTd)j{Z0Znn&U>!U<*542kt-)t3svXlBgTx_!fQhPg|g@3=7#X)rbDaMJ>~~#9dzl; zGgGevdDi}gaG!4{YJ<8E;+1h<7xW=Mus@Ol9%h6;*6jm#80`3g-LS2|{sDnn28rIC zY220KYpd+bYqj0b=M^2z{zPEEUm8b7v%SE9mJJdXCxPrCuGsvl;%wbXE|giNZ<2ZlK0N@EB^icBtxPLjM0$Naew_oyj`Tz2tgN1yt#6nJ>bkl_MgAR64>*a z3n%V z4UWmXmS*lJHUH)V@d=8caG!Fy9=gAG z*1XTY7mxS;_jz@zaMzpFXSEKGnlqY||4HbMea1Mr_Z?SxKJoo;>+juuUBPv$3JgPs8bvUsxb zNcQk5Nj6yLe1IWF%!iq1_KR4=K9`IAChf5C-p6>#nV)-^?9#Qr;nf^nvbnW!Fx=L2 zV#Q{6UB%4bg{Of*l_1x~Fb*6Sei5h|m$7E%(B5Y2p6GIQ*^)nl_9O1&@zx2OdT_N) z|27C9Sn%5HZ3fr$)y+em^lNi-w3gg`oWK&Yt-nBurMvcvICM%#*~q!M4j{s0sb9K# zZa>oLS2+NK@O;nQKamhFwSxTjU}Syn`gP0EV!bhl9xERLZwR_bfJ0PvFPw^2n8FS8 z4tD=*vL?=0aY-pkOEZ-=-gRW5ny5D2J^hbq;na@K4l6{c(T_4D*I~l-O7PU_p#%W$ z+?Z-Yl#K5pGOOKpX*qt@yrP+sZ*uyYIo6|iAh#s;l7|Ik`^=byv3b31#PB+XwsUze z)9bN&o}jY)GvzouUF7IbI-b{GM#VIEeh5wf$reBEMy((>IX_Qjj8D{gE7D|Vo|m8n zjBB73MQQh%ItJJ?LdD7F`&;!}23zWjqh@?fv8$41_=4KpH!?r283#e(>=0g*_v{Sa z`(wu;sNJ(z*?mqhk{`4&5!GK@){GGkk zGB-F^Zq?1bI+t5Ae)o4@cxihGed*pFC3;a;#_Ctw60Ir10I!0Z2&y1Z=~5Of(o|ql zg{E^ZA}5hr%`w+f+uEjo1&BiQ6LX19pkv%0JX7+rCo>EH-;Fb5Dw^GUq=Qks9Y*yT z(jxPuC7}keGD97wfv3U2>YLSTJ4ycD7<7AamNzVR7b!kI&pZidJq_ZT*8k-xdUtN+ zivaTNBW((Sn?w#xp<)i>N7`E(E<+3zqPIjNu-HBN0L?73i8;bCMUDqSLhs5v^n~^ zNQ@MdssiQEO=D|3;rnBi=JV)$LY;BhLmXN2n}#3Wg6eAn!=Q-$ImzrA=vqE=S(`U@ zlHSW;1CkM0poQ3zqCwIz@#K!NiVE0aSd9U-abw9ho@F<~Rwv%+k84EDl;oiSXsg~H z-lz3;5(c?Cd2`JAoMR?E`TDaB4NCXRuxD6z_O}b0kK@gAi3-!?CY_2&ZD^^lNc2XU z=r&!DgfE5BHddITH!s)48~y8V-V-kjfa%!~7Upfq0epAy=ndEi`Y zO5v)8`idN#WFQR*pB8zWA~nlTNk^xCC2uW`7BS?YrpX#EYUPfwQ(E3>%CE4{_FB@` zAN-s2H`v)#-wfU0_QaL4JE(1&Ls7?b_i0(919E51-CbRE=&>e{zrI`0#Iwwz4i64~ zi{XYhs8#}l&e617Z`k(0MeC+U2xJU)JN({saIQJ4jp z1KCdrk@9>I;jFkyphoNhZSlC2D)7wh6|zFo 
z*Ptg(*5-cYzSUtehegId)xJw?rrw0p{&PbD$=meS>BYvshQv+2V%BfSTwiTtm^1sE zFRFBs%qTBUdkP}HvvqSf6#9Q*^wE&Lm++e#2-rOX#idfzZ=LYK9WR)59PUuNnXf&| z+g|9Ly-<&DeP2vYoO0p#D88cUeJcA7fE=%0(^9qcFkB7y*B(?n~@WvgtS|zMoABwZ@8{d2FL}3OkzzMCfh5ZNbxNf&3vE2_e&3 zHq|g?Bu0aFnBYa4QO8xC(&J7204gqj>BH@9m=kQ=7w@_4yR5bg18QE)Rwk$J1N=X4 znoI!zgQZKr+lfa-<*`Z;O9ANVv2O}T5)g2n%COI0^DX$Yx8Cf*+*(ox9s~28?1^_k zX6!iwvF<=kje+~Umza2opPmFL_fxUqz_d;M<*Gb(xG(5vaj4UMW`mwnpBt#xmECSI zB*igM>7H~@bL`{=i8#`}T5&Y6LfSP0&*zdE#@+r%Q_9!5Zjt6UuP=%pvjE-L6|M}> z+ARlBT7K?Pr+Pt|{2XLffZ|+>Yv|PEU-)C41c2B(OSC@K*d`!Yz)qu;pNT};!3*fy zMz1}j>CR|j1c#++Z#%MN6t19xqak(t8TpJwvaxg++yPErb_tTO@P3y(YjtYSR@A>-*BM^mctU z1V$#31YW*eZ@ky@e#bN1%~tJ~RQ+y+vv8Yef#qjVOSYtbHuq9@TGyf-4w`%(8<1aM zIsAnHzSwF5rPWr+-PBjP=t|JA4OT_HH>S3TqQVFT>J`__cW!X~PC~+iARF&Ky8V(a zcntlD<#~kz0EB?lP`usTIEQz;JjP)zb94`Z0K7mY6j_uIBQ$c3Pp7{E{`aFXaztdL z@n+dVy~$x-a$1VyXdY1S9xGW%;(T|A=bkm1TvfjnQd<9>G-<;~>ctw4_dAvH0d@$D zor;<;3dn7nUL<994np zIs(HUNn-8cA7uoW8rYa$&<-){{FO|M;!u?|vXBfj}pWmBa_cz}$Yq~8I7+1X- zMRtW~9J8X!(U;OMH>B>kZ?z~^3ki(U;2R`eoT*HQDoq$Tyzr^-*>FXfV5^_3i!j5P zb%`jJ>{TI+@2aL$IOr237dqHc%Xf7Zs$Zb%G4x1HX^zx3(Q z2#JxJXf5J+Q`rV%zfy8^bV)bu1&A6A)ggi-$G_460%l<&-OnzNpQT>`WpS}%Cq~%) z8=SXXe7jh2oBoAe6wH>22906EpMUbXqmxq(b^l<$FFN}m z!D*5Wx?D^LS24nWwHdJEBCe(6D^a5FdG}26B!l+{b~UF13yvfRZNb9!8m#jv+ z&+DI)sb77+&P;usd}&6*k1TeR^w!~e=PTtL$$s3on*!edI*`z8wt7J9cklVL&_;{2 z2U7`a8VGA@6%l|UVF{9q8=Cv^HHDHb!D_*Y)-pn0AbE$kp^=nb&>wP-}4x1s;}JPXPB$oJ>}Ib@Z(+Ty&4oSzzL zbGZp01a`ITk|)JZiGb($;IG8eNQDGw>cnBs=d)0Jl%Fp%e2i?t>$TA7ln2^L^j`!y zf?{Fae_Z+N^!&4Qg^WJWL?*+XIGCAPcz9lhjaBmSrz%eFo~g0jBS9a2n^PeK)Q7aX z?&7MQD7Kpc8T06PSWSUG7LsE$w}|S;eA?`kVXZ1BK*M zsoB#HAtL!UO(%B%VX)&w@+03(^sLa3;{3SaD?q_J)^S2`K%6JF5&9!o1bt#(a0RGq>mJI4MK6{q$_2h=SI^p!+P+P?U?$GgZ028b?E6?ulO}Xd6$xdwe_jTC64v9-ih3a zD8Tz5M9vt-I?k+jcxv-ta&in0Yq88g=CCN=-!wn*Iwmb&MCzVL${&4WV@Z}?C<5*| zeX`=?Mbv?WhgKEds5t2t|>NT7HKfqbl)+ovPb3!Sud{k zJ!!zXW(9uUpavvk4rYSDAFC8xa3(Y{b!TF49S5#&w=EA|{=+TbZ;*bkW=7KDuHJbc 
zSkqQUImb%m&6Y+}g%dh|Y4aVg1(f0Q$le-4q$^wcbw{IPJRO5e61m~X%rF;)94u=Z zNzbCF;R-N{r$3(Gk9PsIx~iQska4KG#59x`NFbkrAydj`DWyq(DXBN9GF`mBH09;dh$6`F>=oy#1HDDX_{RmpS zfd(P<`Up2dGY~S=eQ3G-fPBP83IgZ{0HWXjtiusJtU;zOEYAQa(W$%3kH7SlKkSm( zul}WOFoHV-^u+KEUNS{;WcCC?kFbO|c+OAzs!}HNeS&xt{!+8pm?_V%sm&^@SeH9a zxH=rXWofN;VkN_g7$H}v*-ic>z{4*XX#d;4>lmbRA=0*rxeMLqKv!PU>P+(e9aes8 zf)d?Q6Fi|E(FAXVd8W0NbCr#oLWwv1!^lk_svnx{;a#u_qBi}Y{SLT=r|aY@hTCkb z&X=V^q=J0^8Lr=#a10Kb1`E@!?g>@s%#tk;T5+AP<2>5!RV5}D%}Xci=zmB+FwQ2nA0$=ZjrA~lD|M5x=fOaRRrCxY?<8@dyllfb35 zu5bQ=3|thMgW%JvbtA+4QQ2XaW?#p9gxA}sog6>)j$Z6c8Y}iuOgv^00qbumzm#$5 zQ*5*O^kh8UO&f$5{^p>U>(VUy1B_RB(L%>((LlyeQD9J^Y>#)55yn1`S9VT=Vo=_O ztFbMLaX#A$M;z}x;}&|diDv~K(Xlx;hnOAjk$_3=3y1C9MUST3q&28^?t~L8lj$%; za8_jgwQ-f;vk3W8v@e%C&fFI|&sgTU6V#oTnxO*%)ma46eK*dMt1euI_I zm8l%+B>d9ha|XiGVEv|Gh9^KQBQ~f$bcQWh8y!&!ic_IhZ6~x;mBQ?%&`2v;xsA`9 zb2T);v-jy=hPHyDmo$5T1@UJSG6wCmD@QEn2@FO)(Wgkxn%^`x>|H*pWw5eq*sAa) z4)3Xg1B*gEvhcU>BmY=&_vsfgf+ZsdM`CsL?t3;5iGU#b@DRJ2%e@wGxt=uwV<_I< zlHzge*7r0I49YsOk=?$#f2XN87)-A0M?m=4QmQG<`Lk0SC`SA!K7=?L6S~j{VNN^0 z&|VJAIK}_j?81=%j&wJeHr|h`lKF?2d!n9uRj?98^AlG3_->U zVkmq$UH30j>*8G1*edI9#f+O+4AF(=KDiE z5VXsVqWKWx?yzJ5bidX)_KlNVEwOvjW0`RYb2Kz_p_)NS2}R#af6}Gfq6%+M!l+{Z zDYqa7Dp7?nYT6If#aRmnSEDltn4FvB8<}q99|N>^Vd75kM|#{-(!0&~fVWr%4rYxr zT7OCE9}qgHh&KS#J-&zs{Az;icK(zt5BCPw@qxc>^4LuuWPJw0 zSOczYe5)hbB;%HvuHC~^M>&|Rj}$>U0O14QTNj;waA96mjT+DtS5WaTspSLP@BX7OopF}==n<8 z))M4~JJnrln&Gi8e=b)3axuz$4W$68sUj``K|}_w9m{*w?P2>!N_IS3lm5Rxc`uRH zyi~iRuvrFLeMhaGI>S^Bn)ldiBo)uaI>e|i-hN3~GaOiz*D-oY>eth-@>WqR1h0V0e zfI>p;`VaiFYlO##OD947qrZIjIuRErAE>J@H>sI!yD!!1GIXW~!hPO}){>IkmsADK z5Uf`+PSJ?i z?$6ovKc_x-RiUzsAjg?=yftSJ1_KZOtRUx&pIqOL@&nsIP5MHlmi}u1`Q|!MY6xjP(EAH4nU^N0}+`W8@2@0zH|!5Bxd2 zTeTKX2W9n#d$<(zmdxI^fB6&~KrD)je~4voqU_gs-~#+*Y(k-eku1;R2Owa0N;Z7L zRec7w@Sg(l9QPdNVjun=*ge`&Hz(|D(YD3FTB4Opr|#JP|Av12Puqc03X)!Z$7&UA z#8H12*f6Q-hEvd=l(Xux%Co8qQfT-8hnnRb4u@^q6|CH~0jdpbJ=+4s_FH++zm2D3 zuBauJ+|)wsPi?jho|NbUoPXpXMt=vnmx->tuXKbKp-hZtt^2e9-{=BIbb+=lj)8+y 
z%-(M9a(q$Ib)5Vd+Ez=vsw(yJ_C` zg)oSDRduy5fX!L_ffy`{8tSC3;kC&buKeI0h+$jzS>Obq?9H(Co~W(HBXo2d_$PR# z`bc>Q2%P{PP=)4QTrHC1fvtzwf+Gf|3^-+|?mOYX#r}^Z%__R4`%O)YPGSr`w@2zK zfmpEt60SNY2z;hN1Y2hg7|1omz2DT$d6jvtTqhU=V3~lr+%F44eiYu~=)qM+X(6jS z1+iVT%{;!p3`?%%p?}6*1qs5cUagtmHk_IK@OX#zM|c<&eJINvdvFccev^2Pa*fOF z;nZjeC_1Wd7f?!H6|K?eYNTUW2iDYctGyYkFSeEv0xFDs3#FOaUM;r&mFB@0>uUpXa=$D!yTlqvJKce&ZAS zbRR|BIl{Lc6Fx{L3o05(u`x!UtRDXB4da498`?o8u!iGP ze}9L^+cxBpVf87`2QzKfo_nk5K9&X+e?B&^PN!wL)?>VO-AQ*|enC$i=$gYGTu!Rc_jay*$g5n#1hE>g$sW?n6%|BHoRKVG3kskGUT-r3?y7Hf4(aqs#G7$Z#z%(^Dgc$+khhM18$DJm^kp{ z8GzFocP9%1KCJ)$`(^n)YE<-^bD0mXA04vLKXm(kSaypaJw|Y@jdu`Q*{>(C&!Q5a z&s!I{rhoh7asU}BuaEn0jScIp{A>cx@0VmQH|iSlP{WUwlg4}wAeA{=d4Ho6rIL#< z#FrrNQV(thZPy1_V~cfN}~XYC>D7t-H*)=z39 zGA3k?%jwXqKmDwM9@*nD8lQBni$+tzkG#^H3U63yllDC!5{XTF;)#n~^4+Qc#;ntc zg?B)=5wFsw2HS75Tcs5brP0^%u=0XqXz03c9bAiKBIeLx4H(4w>i%W5++W|?be+r_ z87e>mI#%;w+=j3laea`HdKNdpVU5!3z8W~^lY6YO0HED$DZk$`0Ulb+jiCyj2cwJV zIa?5Sg9}!Cp?z!+S9|jxcY{9LQ&VHqs%H=1^;=UX$7ZyYE6hclQr-dkMrC1QCHh1H3J0q&W!w=gm0%F5(FNu0W{E6vPC z9K*Rc?e;|Jq>#RA4P-&d&Af+|{05Fp&$_63o9%`BzMcxW(ct(}`L+{uQsMe-greXm zRast_Ajlp~Xp}*RU37u;0FE29a04{!KnWU@?TRe7e@$V%cOKbrFLa?9&=}N1K}sW~<$kY-3Gq z;zm4tO^jfv)BTZWMcVyuAm_@Q%4Xa1L673oWYo$Y3J*QTe&Ju!RcHR4X}1~w&10KN z!O)U-he#U1Oc+tsLjVFZ%O{w^M1)Bdl=EZ8(44@*Ux@s3X zF)bG$?-5ndg=mt?26)4Nu>@$oR^W&@#F(=aaWSae^M9&Vm-XLW^>-15K87HwxlgN0 zjKNw^)T>NvEVUj$RZjt+UHm^f%>M&FJ>|>)s^7EBWv}?SPzgGIWIa~Z92N>LW>DV1 z8d^VTfT%gKITuTWA&TPg~fief1JKxC)$(wx#uA6Y(f1&BPa{FdW@GL+tO;Dz> zgCy=j1gpji>n*Wu)k%7b^4>YKEPtJsbyA<=cw`JVb{&1D)~-v^zn5>=``J!XzFv1D zSun<9;%f5ndB$JKJ7`JURFi7HQshS5gAkD8Knd`_BP^M6`tWZf_O%9h^;L&;`89D2 z>m0mGD?l4;I$tQ~R#}i>KZNS5+4tb7I$FHD#}a-`(G>iFFG_fz87(=O$Z=fDSJ^rn z3G=rkvRBD_12?ALh_|-EbRXKB$#2NXbgJ#1GHYnPJ{&GUjwm0$JDA2XcGB4<2*;y~ z&z#CW{ejS~AE&L&(3`(dQHj4N?$Gh>43NRt5Pt0EXZ!~boQ_{n@@@{E7oM5fzn3imMUSk^_|bD}_8!G2pE6ZIUn|Xn!ldC}^~Dh%4^ zK?Wlnr8POU@C?kF=MN9$$TInUk=)t>Rl;D68?1lwI4%ES#9%d?Yz(!sQfx9pvae*k 
zL7Qmt_?f<~l0myvduudlS#|j8e1%r}@s}c=P}ER&iTmbcsx_njXBCR$Yq3IDrh$nQ z5j9hJm~wn+7{a!kzlpLZQrq#2GO+M(aTT!Zsa-C!dLxcr=zxTqG5NK1)Bv_DERIE= zt^h72N8QKy&+(rHj`e;Y3tHTJTrvMN_LB;d{cfqB=Jk3e`A!aI?c(s1sl&x7;dG}O zv>REyRc}9&iI{A!#Ss^wdXPnzsJEkiTUo$DIah89t(0C7+Ov;Hc{HQ)^qFt}d#R$citJKE}V!4YqG2SAw@cR#o%r zK8j8}qRT03)#B8}M z12GIx1p)@j$a~iE%|1lPn{PMFd7vd-_H*Ij9QGIwAbOa0>QAn*CU;xdI3tEoF2uke zQ-~T=u(I7DCFtfD8%nvP%q&8s!x-MM!bNWx66tNCnv`wlj?DGPo~Uu{f15yxk#%(v z@SCyxa%E`LI3grW21Qyuh>U9JNw@PCsI+aA+k4F6VxYSzlyy*_tM>|Uaozn9!^Az; z6_29!TrHqBH@jRnySbY4@8ESrlYmT&)bJ%CRlyf)O?wE-nhlX_XuuC*o&@ANS6YXE z%Sb7xOZvrOB}itcPnHEtv#J1FKU2_~JmM{jN_?jtFM{4_^nSEX-0A)3sJL%9-ehen{7ney=-{M6F z*5z}~He2MO6|LH>BsOH;mfqpJ65n1~5#ow5T>H5xMEz+$52|-__cezyTC&y^O4bb1 zkl=g$Amrr^LPE~t;-b^U=b&Mwrw{q(-wOXu9q<(ae)R)id(>{*#m2&a7Gwy`0iX)y zdRv6|b#!m}M9Zm}_)`&#<0%|vR26poiU06&$uMOC{>xO4 zPIcq@G4Vy(MRd`3xq>OpollqE5L(>?AGHq`HtC5Ul`}zi(+>y3^s`1nOlMIU81|bU z=r&>WQ^g6g7Jyt3vN5dVK+MZAnML>0{F1vv00gscX5}c*pHYZ8md4U|L~a4~z$4I3 zj&Rzd!xX)>QOrlGKiF?;c`4e5CpDg})chmTP<(Mx# zTB;Rn&Fw+&<^x5cE5s)wrryHlWH6fUSV~p0AGoi_k|~!3wzt)_*Q8oe!N`RpOqkEH zJCEEkW$fgPUaS)<=xBa{ z&d<=#wVIQHk%%Q_n?`$dC3_Pa&f6P4se&hFjGAY2t z3%^vj89~#ZXFAHGRL8%2=ZQDW>$ST%|KR1jV~ar=ava5}-iCORG!6t8E<3~Vs9%Sp2?+qzddOBh^JR(NBokMGhG0-iz>G3n@ z73TK)G^Z}dMTR6S`oQW(^T%))JAl#mqIA+%s~b)(7mN0XaAugw}yy0i<=8= zmxdy({QBCif&z1usXY8lg;A!syzKIhs!<{gWrn%pP=g9~@rH-rjPA*9GyQ~A6|S*r z`Z;^;h#7~^_was_-y}sM@8<*E0#79$Zt;o6ZKy)+3a0$9=yS!lx)KO~n|>sd(q1gJ z#L}#EBzPrL(o87JG6((;J1w$n3i)9?Y9Uc3Fz43D=xaL95DQCtu;OG^@BQ>czWPBx zAU25&nDFh;vX{4`+lCEFnDizHkzIut!f|yfGq*jAX0}8;D-skhoP>nv?WefV$guTI z0l0NovuJff$s!cKQ4-NK<3$OL}bo$$6&o&zIaX^er4U(Upu%gJh2{gH2srjiXbTt?z zV#kX*eHmiUoD?*$gIqpf%N_?Mo4})RaIOvuY}#Cm&q257D&A z%c&er!KjF9O|s3J-wLPk%fjw_!F!3vW<5+HW|H8@<3?h;qPu4EaQ*>S+(1AjB+C-$ z%*<@R-#(H_6So^kC@C0>{7SNS+K?yQexG3#?Iw-*p%MExF z?wBWzU5#un!y1TYjfuIUN43IPyJjbf2RkzcG{c$*oKQQ~hdrpD1(+I#>dNB5Ha zPUuHpu9pI#h+qAA0*e&CgOA$h*}M{dWfC_}N^jy#wT!VwK_3_sxt&(^N>Ua=@f|1q 
z84Fc>r5DtE-fuy=lZV`IcQr+4k@qhal6-8j$x9}bikoHgT1^~*QDnOGyzCj2S9P0< zC$J&Q;<+gBTDbz|_A{O3X>WZAQTiERnwQZ2Gj(Tr)B%!)Gb%P)iRAklH1>ifJ=f&05!!!BOjT4#ywBYQH{fXGNvYRh@e;A_CZ?pjfS85=yjCN7$RCv3Ux-vU&b3gK|8@ z4$l?(=a5I%*bDX&d}kLgjzU17Pz1Zt)Wo6T@i`;2r(w!hiex zGkCWrTFAm&xE~1m(sSJdv>CoY(rZmnI4bg)K4u%qeYdK6l9u<^Yu91&Hk8%a-1OAZogPzTJMDSdFyigq zv=~?J;91jywZ(K*{6tbDI$I_m(Vp)?vY2yE^5ozZO$Mw1zZn4~bH|3)weZ!C z*+bdbouP}Gkn3;wUK>i3rc#R&-q?E4zllXrm1-)?5n1|bN%3hgU1_dr%;H!v zwYmy=#V+&H+EqPf)B?x<^0&is&c#c2+j76T;FqU@Qub<^@@NuH1pyPpMm0Y^qO|*w zy5fkrH%*}3nQF5no&`vhWBgpo86dd&Div^txNTh09>l@Eqx<#4>rEMZH~G&K2{N~Z z*Znm#e&GS47f;}IleJ!&UOJ&0NY8Q+tW=HJ$8~)!fKRmg=M&{I6@{!rT)M1AaY%?Y zHg4M})sYuT4&cs1AN*v>oQ|?)=G;${1Loo>(M9yEwohLkndQ+$Bw~v>rv_lyxqTuL zzH(Gqu5uH;+`&`Ye^}(oemNO@gHGyZ&vig((6*V0z!Y{xINXn}bR%xLD`~LT5q{m| zlGrl+qudlc6?{b9E6%t=1S}3C?&#$iRDZmQQ9^%t$3tx^i)e6nkdw5?bO=0?7V&9l zaRcuJ=|0mFN;aYzyHAJ{NX$0S>R{66$~#|LJf+W7r*_}kLDUqsIGV6YpqrT?YgPnlXiOL_|zJYw<{ew|utGSLl1m)qxFTlFlgRVpi zI{X*cbC?%*g!|Vx;Tu)7GJMSD3hm_%ST4$v=rD9BpY77Y9e*dqL|7Ot8TN&;}S zgls*9C=QnD#V>-%O76OgzFY#K+~vvoRh|g(cks6jzZq{?zd9f4baaBY#FhR)oqav} zpqy1I<{JFTqDesAzG)yS#P zpLQY#37it{sSP6O-5C1+X}+M(dlxgqe~RW46bB8vJjP{;A8YnPzeY2c>NQ5 z`_J|^H3k-2_THiu_Myn3qw^G_i*KGH#%MH(8awB(4H&rtg^u85g|!_%EeyWv2MLpE zflu+#>55p%uy$wDF%h$`sOlnf?Y$TRX6z_CP~uSPZJq@&{vdPx(@BPF5o&?W-&(tb zCMlR|qq$Udf#4d$;RZH+Pz#J$vNmw&N8Kk&H(?vCip%j8rLHH>UB|@yM9j%1n{GIk z*VB>p8^`ic6qVE^PI%KT{F zCZOK;IbO{g*BZU$imtaxOm~8x)!%Cme?#vknp1wU#5R3X!tVJmzC7qUkmOhM8ec>} zza8)~sF=A{$F9TYrI40~FUQ3vSMKn6(F4`cIt{5kpAhMY682E%SWwZ@LQ!DoN6^~W zaKY~#S`DsUesOJRe!@5YwKwrOS;VV zxSY-*L%-jbpzK;7Y4@Zs$2AOM%zc4bOa=VUei3SkE%^;Of$vwuYfX9ugH6RFNHP}g zzD-))$U+&Z=02@QuiahuY8KS5_xbbtOK776mYNNQem#H4*Qu}FQs5QAqsBHTgX*jG z?Yv#GR)<{?idP|6tW?01e1#==3MNA4rpCx;jH$CWDQ0$5qeQa{tz73SVOWZdaV1yGNH_!T!)ot32IALjCw<^jK#u#Y@)=<;6BZ=aMb3eq7|J^M-xMZ%*l@<(-Z zK^v8VbS0$bII3mu2k-3CaC;<{G)JXXui))%bi1gn!+VV;%h@tWVvDNOI>z)v?z#ew zej|T$QI&tUREiO@`~H$JCW+t^HjjPtG1rAcMk?inn$unBy3bRGi(fukV=xW8ZFh_j 
z;#gA@qGC@Gyt9p&u2LyVUayQJ*wc)0BLcpf=7qBwec)+)dtK`$7-Sc-n_hzK4|7I2 zV3!YfK(`)o9vQIYOzmPR1JlPJU~wcdka4FX>&-ISJElS>>Ffm5fwIOm77aIjyMYKW8lHcbl~&(kRXm|*!X?9aMEU}Na8o$zS? zNz`j1XnEJU`sAoD;UUOodiqpk|1y>18ajamU$CV_?<`GaPZ1S2`Fq z`PKB_)@GxO?2}yf7Tf3y@q13*caTecuzP)p@k*x>-f(A$UjuVePwHqGA|nc9@luW7hRoVOKLY8-h&YI;?Z-q;bVP{JYQtNj{* zYa!Knu7Mdc!2=gd`=g5>Z~o#4Du+n~Hg%8JyV6C8hhe^DKwnM4mRivUn|(ik)?Z3s zzK(p10}TKxo~sKGPkn z*$*&1Ld=+`&zPD%n2!2#vFHBm!NTcPBJ6>9hM^TSt6-(v`N z<=;-Va3g788(1EA`I=NNgkeURC1%h%O<5z|%oNa>Ega9{M*cm&0&|_y!*fuaP^Qan z_n{6ENas!HblqAPfx~JFVveT%8{$^6u=}p=56>S|*`QkYIB+NRpV~s!(UFEoT zDZ>_KDiHLVAo8<^tG⋙$~NGvI0cC)Zv0*=2=!}$MNAI0S-Pq6Db=_$}eC%IJl{M zBDBtk<_&U)5dMr!kLeF`Y7s33Rr_7pqVf)i`(M1b0%$1oc%leHD#}nq)Ywy+WU4zK}F9(afMJvh%zUgv91S@jUrn3vHg zK6nD2;Y%MR(SgAlL7!C|YE_DN{x0PAT&veg$I!p<wEsD%?RFD**+!^boPh2J&fg^?8gAhZd@_Z_@7o2=R*Au0b)(cC|0hWF_U%zm8CV%@*Gq8WVpTOB}tUe#yvL^2# zBS%SbK&7bCq!p}ar+)WZh0J(Zx!)9s&IfXo34sg)eVVPvp=E`EX#H}%Q@ntaMV0`f z|3w0JhR@EskHzvA!th`2Vw_2q5g+hpAg6V+50?ed{k(^^TwlCkXJ8#QG}{_=yL#?k zb-GK7|0%Yc@x+*}YcaMIM_bS8{GiW>p_SWjhO=Z_ zT5~nq5j~RcbDGp9Rbz*tds0j`U5O(QVGyak)^M>4LGIZ~lR_LIeks!ExArcPnlDNc zWDg~AZkG;`SFn(`Q|GX5J-z2B9(gN$A<(0u=Y7PtiLoXEY`uA7uxEhDslII8-egs{ z4V>9JxIGV?*(GYg1(Ah1!W+zuvqA}xeUHt3zZPVEL5_=q9pxOlvwZK{XgL#HC8oU* z@>)gECJcnHjpqQXSSUM&D$y-gqKmw9!Pw&2=``*G8t8A;%a0EhG)Ktn^jvF58)f~g z9BV|gXznbljzxrUeiI|rcxp(3=>fAPIeM$C}?_c-*&T?;0?%zd9_T0xe{n-44 zLkxk7jq-Fj%N^ngnRV)vxNxQcb2b7-yE za9+~yZk_6~$`o^@2JKMT*R!T({d_Y-nC2>)g1z3*iRGIWb`;(p9=G0_(uv^=v`On=_aje^pPJ)$yhsz7c23GAc`_=gy2cIQH z!n6&|9XA%~PJ@L&)NGZB5j|)!Vg{p{(WM0c}23 zpPI^oQcN^AG6$g^`D=^zsU0VfEeIOUyoIt(;Pb( z47T4(v7$tY#Y_3|qdQJ{vTfM0-k#fi8}-A)Cbv*CapGdaZhZ_Gh!cdrht-Mo7J2dF zn9sW^F9vxM`Rf@GE{?vpYx7EL+o#oDDw_!c#lx}W`$x)JFC?5Kwj8nb^mNA%q?j=hR+FO8X{DHpV545Qy z1Wpu>8=0Hf*M%_db7scbq;E#*3}5$;37NC$Bi`cT2I1k7QG5sv2Hu^`%KbENn)oAv zDuA9^hS!1WT~DlFZGNOHUl32)U?N4vm+>ZRgU?&e_c;^3XZP|a5`d!!=a6s$ssuw7 z{)-Ojt8=&Goe`YY_B7Bc^O65W(p5%9)o$UTYe?x11*E%k0O1ovy1Tn2rBfP_ZUrSI 
zB?cL~LqNJ2y1V-xylc(kH*f8DVxPWO9}nz}yLsPBzDg5(NnqrMTQQWP4!uYY3|Vm> z3I4DPZ4}shoN5XHqPigXmEIFCp40_|XtEUP*J}Q*;eZ^)#@9Ht_BZ{lmz*~}2WIWf zU+(RXS~k>dX@&v%_3L7Pa=h;N>CdyOpIA@L?8d+rtYasE^R6q$*|i+4AH8vQ_r%)F2N5 z6a!1J^FKH=^BX7;C~2hY*k4LMkhBrR1RL~=Q0xY~oUEcWBCiNt|9HB(VkU|?t@wZy z4%ypPE!I>Nx-1IpD3F%`cb5({`F$QBd($BjZ@CI}=|L|V%_{aFljHFfP2w1>6a>tj*-bebmcgkXxr#_osd0c zz4DZ^m|5SevGJcr68BHJjtBS50L}Al`vn(}>e2sB{t#(0ek3m)C*k(G)b$|&NmQSHr7E)|-J|Il3~ReC`SC9`m6UXg6F21)xWvkOP%8Jj zTz%d5?#pqsai-a7FQuT;g)%d3yhP4M^U{#wx>j^wUGVNGwy4-Z5roA%B> zoLn%?aa$CX7&9I*6dV}NOmEWbUZ@S^=9bj&?ogV<4&8dUzRA8$ybv^k9k^IkmF~q^ zw^Se}c+5KG`$^ALD|;s$z&JwhQRsrAP~ujzrAk*O0+j`>cSjH z0x>`?hZM$N7&F~45DfaukA}uvDJ_0sxixN{;RfK44Cf^e?wOHN9KFUL9$D!bdEoby zJi~)F*!(AmS>^=ydikRBh$9 zu7E4F7*pshrWI$$N%dX3RMh7CeWs8Tn%zrDHlU)h0?=K&8h%oJW05I@%a&uysv1$S z*WoFdPSMYg=pguN$L(WGks7{PhN&$C{eCd=(RS8cOL6)`74b`YCG`D(%QNT>Z{R1J zdY_|i2aI!qKG2uijm81fnUL9^c2u*f8?WBv2LO4Sm> zwz7=7ADfBvf zo>yK@?wFLw)LF4e2D|zVc6h{1oMOm`^WZ58fmpIKsmrA>37n zs`F3he+#0y?c!^;76P`f#&*NI-T&j+yt+1XN|0#PHkNU2>mk4LL!Yt>UTZcoEA~{L ze;I?J&bEsktrw(?durD#mIX#go0lu!D%RdwYT1n;x*1HQP8`*ANGK1C#u_suL?O4{ zeLwJwcg8V;UWPfu#g!*jTV9Gkz2f?H5_ZxNlYnUE>P&XUL&?JyDEDUsB@m6L~d$G`ICyI`8hX3}J!|Xe{HQ=s3enME_&g}RQ>bn2# z4Hq$Y;^-uQwKj1Me zvl(a^W{!@lgaaS`%jZP;7B0mS-EV_VCL<$j=r_{qiI*z1kp1CWNMkOxE?FHO=BoNE6LQL`f8H9W^hI&57jk?1;v=6?vDks#3=VbAsA8X|-8G zzU~s%n}qYt1eRIXs>$&Jf?o2RF>3f_uPz#AuJ*UuGuhqfkcTB6@(oZxLnKsRvcQ)5 z@r(QRY%6Ot4}GObokiR1I_QNNp2H}8pfHViAWn8>kUpPO8Pi^j=gW&fSnD3)YnlwU zv0?h6*?QkyBce)a&246gu~t;(uy3BbpuKwR)oWSQ=jZxIiV`SvV#7XkO>x)yu%>G( zq1!@XG-EDP(BnuN%v%}%hqhj-u^lwgp=K&DK9Y=7&IgWB8G*=R4sm4Y85Z;OR>Y(xvt9FQtTb1P9D-xbWU zA9*Q*{MsWepfVw*v=wyt!v5T2S^TeCm(_ukLGQ!HqSngE(TU4!(r|ZoRvb$s_MU}& zN1=U=IoAdK+O_&5!b?VaqtRE(_GiV}E2*Y}AF*EV1=?@K0$>F9yLOwdwiXLQLF`wq z%Uz(uJjyoz7i0^5O$YoJi>JEDFa z*4F!Uz(ApgucJE8E5@QR5W;!JtN&>rgXFr~bwmdhSLt`~vJ>aAeK%!5c~^Q9&+*A8 z1j@I)CO`3z?Xu|{ZDy#kR)9+kdkWy05cNg5=lS@&L zApxz3jmEH!u2mMuYj+~zKDs`(bhqaP;_s1Wz86SPcT3Gqt9*o^`Cai#OxG>tsmxEuxG{`XAAcF$I(=5i~yu~;X6{< 
zY2=`cMn#1@e`O9%x053Izdy~*>)BNF_Da9m3*CQsiv=?9E>|~KUiN&W1BZXukBW5Vd(nIF?KRA5 zp4O<K5xB_Ua1Wq#-_`Q8T%b=$j)^px+Il+kb!PuNuZZKhutq#hRu&mz zMiq0G_~+LG-odRq3~FuMSqBROpUsCiUCWm@b4d|8>WpwYg%)1PYN8}p!9 z7}1X3)lHju-G;4jWJ(0H^-w*E-_gLhn4f9ie>5VC;TTdP78+XK^+c)f43WD#)Yt8V zo-Q7wJn~b*<+w9uk^ll>G}^x z46`3STW-!6!p;rQAS=Ez4c^Ah9N}S)d+ekvxSJcC^0#4ndvOOG`5yBMn7vG>$M1j` zdE|{c?H5>h5-MGe`=l_gNf@R{B>7LU@o!1=Azt#hZ7U_bQ3k@SX=xstJ0pTrEFS}w zB7V6=po2{I(AO)J-}e`3n*a1?;ysyF4DzcuxI8}FJZ^fQ*~wW^Vn}nHbm1C}Go%>D ziPqj`<$BD7jpU?v#$iNCYVa=IRm$Q(&K>9T@XC6cx0`Ifp@L)R;DOYk^xP;1%$Js} zK9{w-!4u@Z%G3;lcgV3zFQxNH2n!7o=|&5}F%CQ&3^g zMd38s9v{UTS~M|{>POhWcx*}ZbWhz!U?#3ZP(Yx8Z5dPu*2cBr!RO2|P|p-6uoBr4 zxEp#+G}O89u~(u5zg$l)(bOH1E3Vlv#slfb)p-}w_s9H4%&Md4nxhZT*xD`CKiy2b z(B3sh+8fShu=!=Qfl(?RVx`#S>qamF{G3G_2}i!e%~r17ks-@4aj=X&^IXRvMah)7 z>4j<6>thDR;%pn9d+ih7L!Sa;_O26Or~VjGXp96@F!KU2RC~<`8wIVS5LBf;t2l(f z97g!~R@&&Hrt8?G_0Pnd9NZf<-I$#X^BaaPcF%&*AprCq%8~*Vg3HH$^+ea=64xq# zK%zoKJTei@)VZSag7D7UL-8Pp+f-3_HLoc10Sx<-o&L;Km-W=d-H4$7p zmO$y$K!P9_R}__~SN{glpW>B2D}+V#wq3P{y4$7pTme@rk;4Z`KpQYN z@~;BJh!{RZ5OO`FP;rluflXEou_#NP*fxUYLDt&By%RZJiwtR6e44fDbdXm5|nWaCn?2oRSTvckqW(GH5iPn!u7LrJ6 z->b)6J*?&fDk;(g@>+Nwz*B1mNOKSXx|VNVL;x-!bUxbX9MU1lI4v4%rw_d|Z$&fe z8)(vcr}|;xqPZ3M6`^x6)3eM?Q0wE#pAEO`jjKmQmXz+5KDDJ|N9V3G<6oDrodb4A z0&vdG+}#>2ABi29{ z6{P14h0SQRIqM5eBVX?!XOfg%1a|DSPTodHBZ9Dk_KW#|tm92{tlH4Iy1;zIu%|ur z>%zk&#xgT3gkD!TdT_KYgKoi72t-S}ivu&4+}mb!5Dv9=gPomqj#n-aC|kJVugjUjI*!)$en=jur=g=K-Sl~YIXrS|%*)_6Il(ZyfMAhm)1rYR-C z4(Q(95?b*a8Go@7_$C!=-bdK|Sj=_vvQMjl(O9*8MR&sewkR9*zEeg?i>EzUl{qm_%brH`%wlS4)k9Zw5L&|-H zl{kuRHlllWoDX(hXQAH;5!d{`1-+#S7x}}w`~h*8pzM1jJ!S`f(WSxNNYj1x^nCxS zN|h}IhS45n&j@yCT*bY}BJODl3&)7HtF1)L;F+#Q>7wMvTyE5N<9NItU0^EHPAMJ& zh?oU!;hsX62lK}b$%zlq(FZ0icBV^M$nM|6zuJ!5c!C7FHdG8|@6JD%qddzh&M9su z6ai{Gl13~r$7Fa-oLU|W%}dhXl)ehy)S`_z_htn^S$H;1pDNz(( zI~B;IJIBk7dzuL2IP@OmyrvAUu`FiCmaJahEsoXGg^twbHx4M9 zb+0=|lRmvkj1I%cicE}$Hlug`My>1YltyTCqf|7XoM^;P(pr2Ct+C1!pESg`+7}*9P6$aAv2GS)!9L{i3*$Q`e 
zikcQ*iPN)yK3;u>E5_Pr5JpS{)w{J#tGMT#MYL6}__~UkvV>lacbdF-n8s2;5AIdY z=~N`C^w6B21!r?JW6JXpcIdDKd$2{fdlN>KLbQomXFt5uNB_cxrmd(sq*0n)sZ;D7 zh`#-yp1tK{O#H=kQ1!6yiqcOZ7Z5sOY?aq# zFIZDJzQs6F&km;OtKw{cGSgalB~>T&!*l+Qye_}DHIER}m!f^`8p-w8`7qXV$6MO4 zzn1IhPot1Fn|X^4q|xBb1U-cG z>s;zIx*fV@himO(ew~wV3SCKhjnv0y`vICj0mZErR(y!>TyT62(r>Yt-$8(;9fXMeL}4Wr`Vl-XE&3i+tQGWhuC`78u)_ASeWieFN`~4`8uyP zT;V0lk>1$GVA?T7>ut;B<*5l}LBk{Xx=+9dSC?tr)5V7_#(mr!T9YVO0K1+1svpAb zHS_&zkn5olD@*$76!dF|D^3p)S9mBl4kLEPIOqJ#$ ztm8Q2VqT>#IVDLAhTnmYy&N9Ra3SbJM2o`2Kh99t2;cUlF5Hla(nA*^E4C=Tk8C*g zLyBrV-=qb-;f)Ssp`4?{0=~CbcR^r=lCiuXik8V*$yva&z#lpDB${5{T(-wtCqDf)$*aoo1OS3-srW<6PoT+VZb!VWVLhAv^h_qrC>v^V^L49zL;5r8FXjg zBuh)b-#WRpY~%&46|r6VZ*)37d~@;KL}bKWLadk+70VPz1oL*u3??8^Q<8UW(cCVS z`l234y5O0wt-1O9r~RV>o>W;M$ZeHM9#*rLXuw#;Wz4p6GQM@vH~7PKa~JGZ{X7DJPg8!7ys>8bDfJuun6hu-9I!A4yLelw=0V5R25?bZ3|FLMWFz2qTCJ3+ZGkfRe|N zV*G~Q2+9{;+47NmB&R8gSKHd<16?GfaqRO}rFOVAAz1v^#{|*gAeSyjrXhQ#i>)dB zJ_hfkImlX3|4Bd&ZsM$9s50sWughG#ZVak~d0_r(gqUnh@}EZRl|~@2Q-)K&r_Uay&8C)O&hV6JQ2^U9gMk~o6c~Ux z_6CWDQ##8DTo=a`iLbT5c@T&dAhmWigNe%bSXv%TPCN+{0ZMrjDln;bu;U<_a@h)c z@Xkm4_j%1xl{rG8X&grF9s+2CcFTKjIiroVxQ_1!M&w@_8DpgT7+x;y^>`vWL!EyK z{Kcc5o|A3({EAW* zjPzU07`bGe%bD&=@pluUpXR`^a!<{br`22gYpr3&Y8dzuI42<4B491Vo#U@zE>HU3-v$F!rFa&zYTj|rdC zj6Na+9{aU6n>ojd9{omhemp!%8dSVp4+)JVRuW1Z?zq40tjY@n+jLfOrVi2G#~UXN zT9A$8wuUji&q*FE`?@Vc$N*+1j4&gGmrmg`eThZDmy`a&`!Zmk$Gf^(oQap5UbXwZ zsa-*=-&T)Hk7(E828u{r@6DAX*nKx)>kpC_8}iA6Z6#D9jl|9S^xQ|u+WnNH=8eqz z-I<4zC<7x43wq=V==;sj2Zbx1XO}8Xmri`O>=Am~=NAlT-T_`~XB61sF5dG(mkBtx z@2aO<2il-6c{k*M(RiF=Z5!838>#$mPyyY4~1>2gWnK8DWIXSWOsmOuCAU7WG# zXkpzhl=BfT4(RjGeW62HJv+>Q5y(;PvXt`mapv?n+XJ4KSivE-=HJbJ$HaVjr!jxr zXl6*+i;vlK7B0#(FL@q}2N4-PU3|tE<9H;G)txu8u?H{8+XzuQf8?bOvRe~xaly7w zkM_qjc~I($QY#!&@25PE;jJONcmry~e zFQ<6FwDsm0s`Mf}_sW8ahp47GF*UKatT)$Bfq|>skVCvB4b-Gyy0Fi-+eyrXi!Sf` zrd>~)sk6Dm2;|KwLe%cqeB!i_`LPm5*A|2etdH=F+<_cJy<RQj(=A3#52{l95OPeJ7})~(HLg{R=YXtc8>U7^dV+keL#ZdU`=~058*ERW 
z{uMeBNNcl?-2E2i$B*x>PM*{e-|5J3%I~b9L_+3Vgz>cnru5_;xfaMTRFNhrH}m8=)yeWR%mWZ1d$wm7%XFc$8WEHVeCA}(k6X# zQ%0IOgUw)M>{F{lMEc1+tgc8B?mn#gI?Z);BW>Lfo6oQpz+%|bvV2Ro;eR1(kuJoa zq{Y3FzW7hKWi(+{bIn%;GJJWM4(?6{;vXFqhPfLpl0K+@t*tgk4_Bq4bd+8dSf4DK zJdktp!x`%6VC7vf;e3x_d^O>(r6GkCW^F3m1XJ8({8ggF9l;i)XvwDg(UDc+b|zw9 z&4mi-ipru&G5HQJ`VduwRi!U^1s*k&6I7r=EVxOFq8W@GS(^6vNkX+5lu%m;K{dHi@C7!F2S~3cKpg*SQ z+JxCu?ETA_ZzowehApeTJgTfCk=-{@gPX=&Eg&M4AkF#UW;x+uyT+9Q3esEC0^)e7 zgdUSF0d%d^(`XBT$eX;q25Tc+hbbfq;8(e*q zxTn97T6zD1;72Q6nm$J^SvULgjWNjRiA$1j4eH;}vLACgkurE4XMDF|NrKdGfqJ~W zbT_Cnx$&M5GrsM}sJgo9Tn3>d_+nv=OY%%QO3i4g$?CS@5%dQMsvM73<$%`smgsDQ zg-ugt5O?+uIjA%@tM^13jR(1Lw9H#4OQS?B<%M|davVlQ#^ zvS#(M-(yXta&SQs_?*KCIfWKl2nyx?^190PrwlF4RfkU`yp`Wnxn!biQ-of$aC)Ltb6r$AfymNTO>Q z{)gi(s`{PF>+^j8IAY38&M?5J#|5zS8gJNbM_AeGQkB^r*& zqidc2MX03v86gLbzz8-q^b-9I!QS4xxi|+pF3ozEYMpKa0DwB2$XK50r*UGVoSDS@ z=|<*RhdKY#>Gs&{ou=#2ia+UpJsXCGBlw7x`X;3s?HVP~H_@M>*Vue%?8$oZ;rmTM zqkH<)-voru>GDl=O2D-yjlXd3&E!?XZXS_?`cJ*0$=o1%h5C*4xo_Y{|_(nU;)n zcipLMBA=tG4EG$NSm)W1NeevVaeG>tK~fPEis+4#QmGTlwng_rlztD`ZT$C|k?gN@ z&5We06(cWXGWY{oZWjcNdb(#d!Q)P7PfrKsn+AD-e~fL~7dNB*=5a4~SJGOWd!9Tf zCN_OvAeslJ%CGLR;{LD#1FqqSuigbznGGpD{!Xe+@^w-=5=)}Y-uDJWUGj%eaHL>M zz;0=@bJI>{#X(V>YCRsa9?tgxM&L(W>Z$z`++@_@xXS z3t%{PV~Zxs8{~Q88<80v@qInZ#{pd>N+_8@8D~f~Czitqn>Y!rIk>bGp7A5^v)80h zd}z~=I8LWc3$*5SP+)f53B>1urbw3=43utolRVjqd$RN;#gIqWjwkqIn60N#yI1EW4NeXn?ZyCOjO9`a}CnIsc#0Wqct|DUY5>QZOm54&H z42CDR%qKNayr>sqJUlkK6@61AkMw&~s@VZu{7{>Tf}GCEp^NE{246Iuo^~Ry4=MAS$M0a%M9>`2`lnI|1ZE52Xu4Zrh z8&M-sf%+d$<0DUp`*}|z8%$b^01P4X?4}(Aw_b?%wvBQJ2GBu$4b};k%!;4q+t}NpdkdV?+|chxpmxkmwhraL~X($62+>6 zx}1I9;ez5!QO*pK+J4G13OWHs}7P=o*ETsiUr=(r{kPzg|f2u!>1MvSfiAb`IiQZ7f^Txv;0 ze%Dv@J*0riL8%WL`HJxyDb5}jAr68UOKe^J%i)X;$Gx2$T-cNDKR}ZQ69mxzjA!*% z`<88913=XV0t%-nQ`!K#7ZSKUsMgZ~VVB0C#kx=1ne3u787IEd`~^EA9NCfyifJ4J zO?EhxVo`sqPv#ua+RiM4ah2wwzb_Xm5NI;OYR*9WX?dtaDUmxU7=t06^GJuYd^`Fn z>O>Zj)wF5&@9jrDoV!(C=ZBKa0OQcD2*D0^A6{44?$hZkPphDX`WO0)%x<{!aD9$l 
zgI}X}Lz6R2QCqN%_#`!k68-ggmklN=-;BWnJMHO;yegcp~LaE%d{r zKuv)l(=998JFJ7=WJU*$(CC86g}_ll9Vz9Sc7(KHQlkg2isj7U*-e$W_UK5$<};Gq z5uIzDP80{`U@09l8r#NY|H$p=&86dpw3JE0{EW49}=?zby zXgof8(M8+qQ@#XR7z9NtAn-F_<>|FeI0Fq$`V3LTcc1pOtoDgcvewZ;VzJ@HBd3Xk zjLyakpD$3lY~4MwoTWKW1K;9Tg- z!eLpe8ZpY$9<656z!KIt6P~fEvpww>mKUM!ybx_LtxX_K*X5K4Sb5Djti z2IgbC`V3{xT;8lXga(oW($&N2b23ICb?V-tE#+)xKSdcVgP-*l4z%$H)m%IgsD>*} zoCFo1E#qKuLH6Y$%i8`$mDaO^psx^6`jn6djx75XgIzsamAeXxF@2kp8CN)GLihlc;keCEVze3#IpyyA4@hv%+E#r zm)piB${w==vl5%>u*W3P6^EZNB1r*~(9=d5>6mE}Yfeku`}j$9q1F#A{#`u1)lV3Y zfNP*O|KTtVl%2Sv!W)?Z9|;ihV+PxqWTN5_51!r20|1GB8aXu}()69_k=P)B^r3=b z7uMiI9Z7^1jfqKWOmnCw7f!ci43-&qLBO=-J{I=l)fXGe9{nYhu)V;35|EWHII#a2?-tgq3GtL+kvl8cp+Ed zBI9=w5<6ph+{d!K1# z;d^+AYUB=6M#wuVnG_zr1^!f{QWsFN1=5NUJ1= zxxaE3YSQkP>tG(OZ`>CRUY9+cw8)*sB7kVf<+mdPr8yen{4bGNo{n&}_?hzEt{F*} zFSQz#Zo`T(v+tL*;p-HE4fNb7-;xWu3zXQiZ3PI&#Z6SB))tFtwvKQqX+BaOp(4@J zQ-*t4(FzEGy~UA(q7P_+XRmu$)f-~@Z5KU#soVDSZb`BBHAPc^=|K$F!2(TqY5zgN zBMfyuJaB_%tDb1Z=Qt0cWsHF(vcf2Z?BWu+G zIzn$&@@VjJ#p2yv9c{P1i-Kv;04ZTlK9AtX38BymF4Am4JAxjy=tar)K z+At2y;hvi3tO&zfEx4it_7WIAqh3;43Ij8Tp0;?-WyQi>L>SX3W&*E7i?eN)I-Qy! 
zf(sara}9kPnn`zvBA|h%s|T0iR{8@3otF{EO#z*TZ$BjKaEit6Mm}M>IQVEIN&KcN zpt;Xzn-$z)nsnF9_PMidjJ0^v36D~}(^y62yQunJaERs^1If6>=FG!baCUXB)LRsd zZ-0f4>E8cwq#GqYKiq&EULE4gGwOziK!TPoc+$O0+0AA>`51acK;oQmo|E zf(z7z0rTj!SPkXKb84M+PhEn}ymg*`UDzkdKyd}fp|59TCiG(xoy>GSA8Ru5uIC>@ z|6dM#{Ua*+G7PUjwm^fj;HM)@8!J{`i+c43B}|#aF^b^5bmP_h#%ZbW7z$`&&*&A^ z20VE=}kEFIR2WBQjwkMM#1Xahvhwn!y*`T7FQ_jw`Y-vDo8%^*;-(zq zGAUNrjTW@{+zF>7_wwVqY0eaIJE4&WOw`jzETt?8Hke}((d~)Rf2$(sto?b%3Eeyo zZm#mI`NWnL>&Pk{eWs5iL1>qgS9f=Xy{6zHM0%S3V4pK60pxdRDGT?ZY${EvG&-Gl z*V7#{z)nig&9rmoKA|E>zC5x^101K1{RoXP^sFq%$@{<}%g!K%<;&2vm8^}^h$sk7 zk>qVrw~R9pQ31^(%JGnkdGn$JMM~g}`wXWf`%#HHVc7Bog-Nh5-hWTf+aKE8Pn6h4CLAipq{eE;MmsYiA1AnSxyeG&OwjeUQUtWKuTwd!~MQlFAKQrj&tGh=Bsl* zqKe2T%HuZorZ4wtr_l27N9leCp(;XX@>sT{z@JASmz-| zs?sbRyXt>F_eMeR^~@#b!{hL1W#4lTC3n7)idXZ2^u>@VFa$x=F(!OnO`TpGTrO8O zY4<`jI8?B21Ka$s&=;#Sj5z^WTSr#g`mnE3PpL;}itN0=nA{a5I`Cdwx(Y^9ok3ik(1SWmAyq0A*Yf{)v~)4W0SlSS+Y4+ z){*eH=sT1KaHqyM=yc7||7PQu?Z+C}k)-hMM^y$i<8NfnZ;WMf_n0i)9e|9NN9$9H zr|`RFJ*T+DbnFD%>Drd~Nu=&y_#8Xf`^LY3%#EmAw_IULZ*_$wwn%Y$0tUcWRkjH5 zcyH2(n%gw-`YYxcRFW|jCeM!9dTow|Se|+$wxrZSJgjjy+wtR1hAK{haRwEQ-kW^X zFj!C|hxN&E3R{anPOrK<3am<{qu+qc8jC2KQ_EGvxIJc;fI9CrJfo3)`>%rD!0_xl zBZ|D zJi>qKRMCaG1bpu=;aamv1<>#`{O;kpg zN0l|P==iS%1pi4@4@LMU_K?-fV=wS#w*B=$7BOBP$@54(b^k*G;_i6MZle1-QP&{t zaQv9`;x6bv@dnshY9~vGFM<;z!c2G^N<}veyO%g-PFh^d$$=rU1JYJ0SYLVjuqVA5 zO-Xy%FZN|8E<6($sfWcnp!zSM6hByh2No7f;Kk8jr!__tPZ9aKOw`mKVi8eL7zxOs z0I#1z{uMlv3-vNa3_om|%a~!u zp@W#YbwW((%nu}RKA_LHpjP4Y!KawXSq+PgZFPF#(14XUw<0LR-tof15E)J=NDP{* zmvUNia1cC&P}kki?Hzn8ctG9@Xu_zasvoFF;*^blvd4+_{Y)_SSEHxCpZ_@tczDX8 z&#HNT;KQ2l@4wJ80aZ{$G1Ml}{-;`SpnpdbQCFZUgp?)tqt*>BA~n7jZb$aD2HKl) zjM#cp8m@kW`Fwz9X}`St_GYi#>SJf4Q3ml9FZcDacnSY4&*=9Bc6x>wx*{$=7)y(+ z(_6Oppwo3h{Bv8bH#+&n^`7u04Ex7_M;tu}lUYbry4z!T6W-0gcGG;FD>lVpj-!6DAJjG-yCideXcl4Y zm}t^XJxV74qk%?Ihogbf2WzNnRgS}zeHU@jwk&}81c0>q02Q3DN3wxl(r*m+ClqdR zkZ2qJiG|(3Zv@}IcF+t;wv>$HQ``suXZCN6>He-ON$vlgb{Z35S62ub`Z9k?; 
zNu#)Wt3+sPh_VsTJeXNSn|WTi7ArlD*j$!*v2eO|fps}-w_UAieBCL|L3%l{w?}ki zhmo(EeXs>m#zJgUTME&2Exepyy9RWch!S4zsyUAN2iLXgjP%C&a?)}_jpmV2hzBWKSZ87382!Jyq?KJU%S>JXu$2RW-lGT z5Agr$RytOTMyO3jIQdTZ9$v)V=WdUUU`z`$ZC`^@FxQK~RR73#3>_DVbx*OcLvG(PwB-GDHZjJ0p@+Jj#YaT5jy`2ojVQY2%jk9E5 ze#YggRFYVO|9jVtzM z{t^);&RLm?rvsSChf+>b%7n$deGrVZ2UaQ0Ay8}WZ`GvYkA-#U{K(ccN*M98Oh+j?4qRgE>l zkA0_iu&;Anvbsy=C4pgrUAMGTyWO^&o;^cwx0szwU-RXp(-2`)pb%QiXl8yg-WWX& zv?^!v|JP|UmsbN&KOix>=vDO7mr~p=^5(WbJ71K13YHnOYY{+Y}f+Di>K2jWx z3d6OoJjOlRm7B-#B9xCOZ;632iA%rQxy|1)>9!=@SS;XV#D3G}?7uqXG*cVOpN^;1_l7s7DIUTXvst*3Mc82>GH* zaS}NAca zAJx3NZcjmydXc3Za@5s{xGQ+Eh=wxqWx zvbjlzD^so=g3Jmjj;&B;{BxsqNd6yOfhD9M@lDy)XG_fytu%*-f~YZ}m*cqB_gqz` z=+?)+_8nP$+R-MHuO4|VWU`5`K(*|_WoZK!o(v;r4JZdp3Zjnmmpk{LTPXl*c_(RT zMakhY29QBa|GnsJX4WmnZ>UE6sXag_0wzt#_l(xx-aFt6)W2BJ39vNHB@a_cePXDQ zOj!QpKxPd6+#L#J4F|>q-W$w;`d=$&?Pa0~E3D4?@+1#x?-7~abG_0MT23mozH5}|6I5hI-ZHb?1f?op`K-Qj-%@90@!rs4OByJ;AH2pLRy#q)Qs7toi zHVE{D!7X=*22O2}J?jKv4SGi#pFaV&#(Ue1*ckS?8z_-$zGKVRUha>qV6yyunN1YZ zN4t4(niaJTwWW=C?q%Hpda0l5^KZnW&^(f{*;GsrQMk0~gJ}yGR?wSAGXsqY@PH(+ z8(;6eI(FFWEDf->)}IIdU%*{|gHrZwE`ja7jsWsrwOFZ7t1NSL1DK|HPW}w-Q?&={ zT)b`5|4&%W*s_64fX`^2Lk~xJgVlT84(BPr7Qr~LztUdExydHml(}%lg(X&0o&Xxi z_GyntH8zZ&)KRZH{XFj#l2nSc>Uu#-!233-<7rbJP1;izRgPqAJ9*%PRB6Mh>uT2c zQs?d{+nfT%%#nCC!cd66D+XP2!tgOL&ro`8V0uGOK63%9Wyr3rsEgKUIQ6-wcy2GA zGh~TQmHxt@vp6{1oU3p}iYHbZo9VdC8x!d5t5}I45OtBdQ1Fh~R4t0%)aOAEXt5ag z$r7DobTp&gIPnC6GDoHk{4Z4`b1y(vTWntx4}A7hU4v$N&qkHiv*<0#igC6{fN`*A z&(0B#uxR#*zB=WX9jZ1jL^s_2r0EiHS}OKs==*i_li_Izz3Cvw`9Tl0<_o@ilMMa9 z714ZxrzV0DQ24;YluA8k6XxB#MPm&&BzU~}n#UUCcu?u{-c}p5Erggh!oG+jit-C( zm8M_1c|wefDXdnTi0U{^517Bf`|mWQxnQ)@A}MZv9NjQLZTwW4AMC#;s*=jcZluR< zLpfe_H-7`X%$Z&9khu4w$)ZXy%cm3t3~!2xLP;39+F@Ol$y+x1(LCl1=ltZD3oHki zAOw?LGnCZXo2|>AEQ!}D#Jfb82pF?U5Ol(KWq__65Dn7(TQ)MvYPa&!eAz4H=bneJ z8cAt#ebFWM_qsxB8$FBZD^eUnmAdm^BPBkqFv#i3WBU!$HvRCiGv|fT;6) z{&OcU50ABWs4#8OU!S`vi^LRTrjw-Qs=cOEU00hn{Vuit!iTG=^U3$`N*FtB>A?e; 
zvLUnAACROLfx>+&O0oLHi^Rk~ZKRaFHSW)jEnn;$ZeYr2$7C5V)BlS5>W3(s@9m{K ze2^4`MU*aSkPuKrN$Cb@>Fy2%DV0t^x)JFH0ck{#Sh{oRhTV6T&-Z_L`4PD1o_o%j zYtEdwP8ca&HDS&cZhD{A!RQ*+GiQMh1l{lLpd@JFC1NrEV$35WUpwO-f4znZPG!iO z5UNgOG&uw?M!~|DJ%>E;J))T$4W2{-{}ihM+QQ-_Gy>`8}7e3Q1guHp=V{DC0TU#q7vqmUt2^{%f zQX`|X8M9ecZbkm!ZoSL1d?SoB8tyX(K65JjMlqs!_LXswI6pP?_1bPVjJSkpkp~3} z?lelREg9{Pc(Hoz<=rNBO32oKx3Ci1XeR)gGu>bOWy{y}*L9flH|uAuxc8)gQ|#o# zN`=N5o)43ClgJV65jp$u;X*d>$(!RjzrPbZf)kk#sw|%Ee%ir*n;IFP%~sAP01ShT`-?{Z~UCGnaOzzV*Lkl z-i|gDHoDx*gK^ixn5C*fo2@_Om@D7Tmydn~#prQ{)wOUopw=&mO_lp=h8QppRu#}j z2FZnJ5uk!R8+2*iLD{DHK~(UE@{2)0i)iH+XI$@#y~F;Jik{IaIo8!b!Y$6BIpJNg zcAOqE#e4)eBKw_+`$j7HfzO^n*)j7Q13t=c6@s)EH)AgE|C9cw3@JeI6Q3~Nsq=Tn z-LKAeeCl*KTI}pu+Pn~KOJLdo(>~&3j9&z4xuTrYwF)gsRKbC+Lu=~;?3}42Toa~J zX#QBbU+#$TKn0|vv}=e`Km;6TI37aZh6kP7mQzxX$#Xm7hXtFpQ=seoefh_kp_x)Hj2EyWYRxuW z%`P>mJO(41W5>_X(NoOE*5L zBb3v`bHbG0lnV(jCwo=IM-94?@wUvgKCKxc1~3lKQRD|(0^=9}MtS?qeLC))C35^N z zZ+DKr%V*RmO7NDC!0Gp@_hQxGun%??4;0BYsXt;(?5J3GKj3DOKoDSdkj`A{Q@e*g zUwt6xQVmus{g-(gMZ2EpxB)exQw2w9f&q+R99{{vj6OpYx6;@rZzP#8V2g7+_yA7a z+@|;}J%4DFp*~hU^aV~O;GTrvhmqBfy^QXM1@G@YY(I(?lhY@ zOa!UyVQYj?1i5zJvH`y6U|!L6Su8E$c7*|q037C?t795ok}xwQ;xQ2$CP}^j7C_O* zvOp@m;|g)@R2uriUD5Q~)|>XpUxAQ$2=r$YP>*+sZEZ2T{rcG90O!D)ZxivQ5q3Ap`} zc`)*SUSE44e3pglu}-_MOTV>NMW;p4A)$q3F@z3}&7@O{GE0_sV>5y(DYFi)i09&@ zp3X_4F;J;A3PdfJFrM_4M7(Kp!n^$7;_Rj7IPAFEgPDV;F+bs!jw=U;tyK0jA(3N6 zn2^70pHU?H_uun{Mjq~q724DsslVz}jWLDUkEoxL^18~(V81oN zz+Zu*o1gSFa}~IRO(Is7#tTrSBqxmC1q`9C@%sVxCdLe?|53 zEE>hyED5T`sg)cAF(A#5E;&Yx07BmLjTN8JpiAmmc$GT(7HILNJyw7pyXtfZ2!4_t z(AGbRIf>&+@_B#}>G2YL8OQ#X&ql!6v)9TFbHNm!lY_qtF?V>K2XIi($SfgP*yM=3 zD5oLNz+;zV&#kfh<%5=UhJJKG&;P2+644#T1(!A@b5M3>xzg+e;v@i6}Z|^b&C}-8;_>Yi-l}& ztx?8U*8w_zAL3r`d=Hnce4f`H;n4Aimn3%CZzq1qjI>3i*c#4H{Gl@aDA!4)^egTt zxFgwwq4ZpJe}_EYZYN?qW>j(Cv{das17O{-U#Ww>0>b;xG!VY(8t}v&GxJU{9ahca z+e;WlK9?*b-oa4p}UM04qK zkDN_IVFMr3*mO;V(e~%#!RBYbTE759!4G1g?@t^GW%LVLkxZr5Z@7e}JosaRJ|pbe zC+0+t96p}<;j$u*TOjR*QNmVruz=C&-e4bfw>5WXY;0hFZH#< 
z4Z~>+z?zA6JR(-Q?fvTbT8|!r9$3sv+rk>5lI!6utJB3s(aMc?BA5Ty*~s&dX;?vV zU@ene(3SAX@{|C7;Gt`dWmo$b>mkD#avB7KY1%cVnl$dTH0stSI{YAW=%GYBVSr5e z`S&l@Cgm!>*1(pf_>tU4&LC=WxEPu+4YGFyfSl^=Ye_q%B!h@>3gF)%e zVy&N;^%Qqe%Jm{`KJo};3Y%FCNOt&GLv0Fz- zWU6)q_)~2jLpOFbsyKX?!oJo&6XM=~|Lf+w@9H$?FK3aKRj{gcHR zTWQ>8PcNF0%qGYuR#dIn#GH*Hj#k3rrzoF^3sQko%b$qg$Fw*xc3o8p`|mocIB{X_ z6?AC04!qpRZii2yL>M2QysYx^gk*E=l4}c=b#^bar;t1ppK1Ck2ABZI-n4K7vpVsr z3OJ(QSj~_OoEjR7p{q5Z4=ao;j_rb~$>Qsv*1hJhQi>rt58&On&s8;ehC6QJ-{F~r znSz_h;YaM&L8n|vJiW!;*sSNLgZ&$7T`Snj@^-45%nma6S~o7{9t&eFxvNIZl7vNb z#F^juiYOUoAtc(ghz0@HSprON=gr=h-VJ9K7CY{n6RCx>LAExPo(2!V|3hc8zeI)I z(!h4xK2lUOV9|o!k~)cfOG;XkI$J&$zFjvt(`wA3er-wZ??Y^Vji&ysjD6vAu#4P& zsdKYNaZCu`C7aUP08d6jX8ON-(vTPoD6McJ7L4B{ja(R@EI*XvuMKzFx%gIUxcK)j z;in>zaWmq_78J$*jF5=T1Cl#))9v=s8aN;i{ZC}K<5=<4#Q|kPaPfo1Vr+PrpUpj05%*r{45&PqMY zn+R?WSIN`aq@1e?;*(CPW1b^#0@2JAzcbEJ&muq)~=F05C`;9&1Oatru} z4XL!l49dp2hM#cU&$po|ht<$szr+?rtd9M#v&>$ez23lK{CUL<&E0psvf+p3D&H=C z>TU`rLU`q~7+i~Ll&01{%j(`Lsjh$41}TXC72~dKB&cM^NIMYi)fQWA zFQJ00t9La6J6jgZ%50P7<8?xWx~ zxJq@u2yw`t2*88hei+^YPNywcX7Zao#Saz5kz*QP2jUe8K#i5K(;6E%OzjA6>x-rT z$f(GeSbfI5uR)N0S(ep99WG7d-YWwVLt5nHH1sZ3(ewQ`BK7UxZv?UM4`u1uUAE_H z9>4u!9EUE68{hSx2n!y+nFu6wa$tl6XKv5?6^`W1@wrdSSuiwPY!rGFd3+Aw?1HDo1R?17fBgH^>CsWJc7?MB@j539=Aa16Wo z7M&*dSc|t^fIB?%wsw~S!-44h*J5)fdb_(bCXb$gQ>zN7QkE|2>Xd4?D*$#Z8@Wkh zWF?!q5UIY5o}LmLMK?w-vAZphqNVHNUAC zvBlAgW>0H;O3MvnT~5UJr>4f~KH}Gmqm1_)HA+skXIN07#+rG5@1%-dx!RtDh1THR z0*X~f+F193Gh1G*!O++0OoRH=Hiah^lEvbX982Y&J&i}S%b~xAUCo#k%Rq%)x z%CP)9;woxeoFSW&q|3wa=u^G=)#*7Pi-$xe!fJnLys13#5ORIuRcW3ni^(-2&S&_8$uV{ zZ=@)c@AV;3X=~GS!4Nrbwcl@Qf05ecpnB1J18s-%+SiZm`Vh>LobLBY&Xc@*PYF?a zS%f8Y`oRu;0?{*%?20MXS;_%KI??sQm$yg_wfu9YFPC>oJ zl7j7x9xtQIb9ut!%a?&q4drGAPR$)YRj~C#;~&#ZY8n&jPZ%?O`4}DbsKJi;o#|Je zWqF>~RzqJ(ORe+XZ$p}B*HmEvp>h8Ip6^S5`);At%tD-vo+54#z+m6QZmoln*k?h^ z%2_hyKN}ZKbx;L)xR3vSj8X<%4dXM zeTfGAQ@be>iu?|t3A&ztI50zLZO4NNS$Xgvo{>;+q8jLX1Y{ zv?l#s|lm`vC 
z2rch1;|t}fui0c&l76&?2@CPXT8N{9n0>stF@C-HhVA!SdcsCqJY78EJ=JYk#AMcllf8ir8-NbLh&O%I^FuanONae&ZBT*f~9Y%Bi~c?0yV4=AC!Bjp5Q#KH$`$;WsDEp zlbU`ic_Sv)<$!6bhwK8p6P(hPL7{Omr#%}-raoQU?D`tDyHR=S0*6OyC7w&mnbm`rhPs zP9+m*RK1(f!Ex;2dBfysyjhKMLd6~|9s7T+^6gC_>gCqU2S>6Qf+nL>PUj1 z84dd%zcglu%5aQr?WgCVeUJyv;k>Y%st63|vdRY0X8PqL8ReGU(a=P;w^eqX02 z@L#)B^Sr^5!lgwM&U z8W*H6ds5m=6?dezbP>*D|IH07dgr)cW1>$OU#{G5IqE6$F@xh?o*r;feW{-`WZJ~? zv~PL1*>IZYFaSfW@DpKE^IWf3e{|e2B5Um4q8G(FUhYBGj}afngKJhfE#xYg!2bej z^jAu3svKjej>77?Zl~^j%?ot9rrIsLdb`<{4k{IYIqc!MZTc6D$q-{vbkLo^jrKkV z&STi~X=fGpS;G*G-k_~|wCsbVWb1$Bn%M}NzHdApOO?s}_uM&h3CW9I;WmzCsuGr| z{M`HC?_jxEkMi{TS8qPz1u@SrS;88KmCE9qLVX9y0jGnTZgx-j?BnEr+w1%`k-S#f zJSU$2=?ae}j>htVZA*n`jcUJR!=FSyO#^&1w6B$uR*x8?8rUHvq7Jy1!DtI_m*n?; z3GiUn842MV%!X*pb!X)WPsv-0?V^S7r!s`9O ziv@Wj?={7W_CAN*SdP6fuWW>UWu&>d2OR1?&6F+-+w6HoNQsTshZ%o*aQ<{VD66n; zLzj*Pk<`&joOYeSIs0w&zZ?phna(n?dH>#~JpnqFdD^dRmZQ`moz$+w-&~08QW|*0 zK3CfLU;n=b6^kI9@A!-?uP&eHlzYv)1P=nrpbt0MEmca@SLaId zX`q``pbEHj&E?8zzXDDz)BP>mBrg2y3o!$5R4g3hr#B1DJ5i_e(YF2}Qr~`VR4Et# z%tQA!dB!goD$heG%OH#Q?-@}WYtSgSqXz6Ow;`Fz>#oaGA*pPKa!!-9QUk%THzdUVjqp3p=5Ml{g2X zwVDN8zB8DO9gHqKGhl>4_TxO?tAIX@A?2H;%-=#Nn#wJ?6HDNtl~d#Qo&T|y@HLDG zl$FLd`_hm|o{u$lN|qOb(~0Sc2ZKZ_(M!hZF9*sXE~C+5Cfj>@2i;5MpD|7g zU|M>98vdm-+Z_l~F9GEB5q-hdc|+sDh$2CY9ptRN$B4NxMa>mmd|osb`K&BqmTOjU zw`~4_)xN)vGt9~_3r2ip~A;o zR@1LdQh5bAC-u62+WDSMQ#vgljYnp1(C}aj+<)onNf==#MoLW^o?~b0>~>e90IHFQ zQdnYQpE!^m` z&%tU{4*GO)dYVTovN`+rh8=7a&#++TWqz+IC8y5pabO-YdXvo0hn2U(+?)nIlV5g3noQ3@;+qBuNLu|9HVmaxsG;_^R@lYn;*O9-}&2q z2ouPdc!zCI^>H~92rl-q6Nso)eI%Ayx_|LGMoXmO%CI_hI4|xIl`kgxXI+-%Fd8C( zHp}=0n|=bZ+{}QQ-ox06iQ9??n_iozImJeAT7J^BW=YlmhwHWqi>i9kVX^oj8=_?E zS3%d($ts$Wf$R%AJ%D|fJi8IxsVmVHJ1e(wyIihOzU*wNWv=sSlgma5nDBf=*xToh zBqxgVS*A+f2`%f79Gdj24YG?u{r6$OHF}vM(lC1;Bxt{-W~$qB_PO4&+1Ue~aekJ+ z1GRcpu9y`xF)KA>tg~B0TXY0F9}B_vz}37OIFb_YD;K>z*f`fe3jU(YN!mCOEYMb< z#-Km|C$RmlY2$89ctq3HvS`YAT$7g>bXq~(%qjVDWu)_u$gX+-2^vKnu@Z8$7_+iG zTl`|L>>na^m!MWla`&tv1kLlnGhoU=<{E?aXiQrA>R0;K%}ASMvK0*XKd`gzSh__G 
zpMR|AuhHb;Cr^=8;1V*PB|tp!U`&4q(Mpssv4ju;u60-6 zuM-Dd$5~-ee7W>zGBoK%mCLXRoWPe;570TVyzG;qI2K2|@a}!i2~twQ*-0_`h`sLf zudSM0{{M0*v*`O@)nQy~;UgjK@`tU+`xcaY^ZRI1p|ge*b*v6e|0L#U&ciPnTd>o9 zw!Jtf8N?}U-1`#eufjj%u$o1POuRb=dx`;u?v`4y*1~=E-V*UER`-?+uTiAhyo0Cij zWb@HIq$>C?g0)I69zwX2^<0|_T|?sAYm#!tWm>Kd{iik_{p%fk`9R zo^jpwzi61{t!0GR=JPHZ4my+!6v6*|_F4IdG{Uv6Ylyu35YQ+D2-HCI{M7=91UQOA zNS<0~(Qa5b%)bW=8v^9KGIFz{_cVp24Y?)5IwhqAnd*(@y5f2ld3xJPd#*Zh=l9xG z3_VzKgVpqNx`zIc|mg1TWExX@q8U8)UaJEO{mgj zDYV_xaersLm6^N!xD_YBeDUi1;mv&WO7s3m+lcd^xc5Q@gb>o>PM6(f0#*yObg(Uv zdD~7z3JpK{189?S3$#1S4Oukh ze*a#}A1n@$p2xnKcg79(?@&?1zkPolyrj=lNAF2h!HM{DAP{U65b=UMnSNUU zJGi5QzO;j$t(nm0)pNw@0Jp@r5Eai=rfMNZ#pc_fJXig)OR>BZkJ%tv?yaC*V6Wgnr#6f3AhzNkaW5aN9cD@Qd_Eq%xe_ocwa(lfWcpXer@ zb@FQN!{Gwv&y(k?w~-vTcKdQfuh3?{^kNC--k#bx_-kDr-t0&jpka_fVvoFmXQCmY zm6##8_f@PS$mEAr4Gx%JueXVi`n;Z(1J!x={7z77PcNo(6N{&KZG$>2fr?yxsE*pclyU~p1Bl}UO{=Dj1Yp7Bs09bf5R~i#zUAifZGM?g(23{eAVRFFbU82 zwL2|Md=y*7+>|~prWXeFG>HXUrM1?+chO} zuh}zCzu|bFifDj63)No-=LJZjvMrLLc05Dl?vMV|2hZlplBR&+5PriE~_FwdIzz-HfoER zjtR7e)u(bBXRG|myQDqJ`>#JA3s$?5Q`&XYZ zBL0V-|6+#&aiIn~l385hCM?@-&MjJaP#!gwM_OzqY)NvpDxH9H_jL^${p&>DxQ{Cu z*B+1V39cHTbZ%NY-+V+tqwE^FF15>g*9bQ{8x+)x)y#VpAGH?t2gsM7qeBC2^MZYg zqS&XO$Ex=^Dn-lhxnfb73lz>goFzMytD9zx#mjD!^v@Y(#nAu}eO#GY z%D>&8)(gnV`v?%O$^FecC@jB++emFyO*iDbhm7au3P1N#caKC#G{oumq!V=HU zZ@EB0CI9$v`#X-58agoLpnBgog_G6?60VWlLze(w>|PW+11fn|u9kqjr+PU8x-^=K z5SN=B*YhWJk9VR|M4IE@h(b&W7hJn|b00yyC{bWAQ)(+(kv@=Qcsw$ht8}N^KPm2X z`^>aL-64i$SWDx@(ot)fdX??qDnY$)R`<9sVDm&L+SpT(^H5l9+#^GEy#w!8e_B?N zRiApB7`q_qtaAr>r*{d5HFc6teom>JM)h1K5@QJ3;Sc}~8`^O6cYj#jKpQF(WF#~t z!5`Q}0!Pt$1J~JC+q|pM2;Vb3370wT=XWEC-;LCgC9HEbQJIej)^}i*!v)-``ntcD zW(S|;Q~uzwBltF0v9n7{-I;Oi(T8e!7y2{V95x(mjqJKi;%?siy704&aUVd^7%1%m z;5i>4`dN%dW4u6J!)}Mi7Rh0s1T;5{7FN5#x#M^2lhcNH-ebjR2DH#@Xt)>r zt)%&uH~U5SAT!Z0@xZlU@w@4I{;X~kwNvIu&im9OsCXbZ#*aiZEw>QX$(>CkHzFCw6K<1PO==VPlY@^2SY*+W!Mxx1)@Z-_YS{A zmj$+e@%6TU)aT#C=o_)48E0$vbYEhnXQlK`gxT(&9hnkF 
zvD?QLu-I1?6sv%w`r5H72XYY7ERQ!%7yzM!DGRruY-2!@x>F%W_;9o16 z>b1JJ@U~gX02P#!%#KH?LC;L)n^7I+uTpEl!{Y)TYfpi^yZ;*=8eRc5dVj(^^~`{9 zMVPPtWVRJN%to}nIIv@{YLXz7w6g!PI)PVm_f2tUdlT_~Z%k0UK8S9>*wbi)^}nVpSvJYhdHN44F|K0C}==-&DciN z<8As13mh^KQ8lK}lbCaS#8-#1Ok!5wG^76@`7ZGO7R&PC9BnS4`GPLqp1JI%Mq&-2 z*->So*>0iV#duJa)RxHC?r2fGvrfh&?nLIRU3Zv;mRIaz(x`ijXcLn?z6D5AaEE_t z{x`0d1>e(=6_;{?`r^Ow&J2CVAk#o?Qw6c-jP4}bw<$e zQ;=L>xgaTd*zvC6!ob7+B#R;@u!M$aZH~$Qg0iO-^A7Stf4-i+XZa)V=-hA7 zc=^*^|GJDY6flp7oNXo9r=_J_eId1d^H-pUoCVtEVe~trDc1YkP854lg)P{CS7sh{ zy2rVZ)^hfv2e??Yo{-VEN{DmcVT9MFLOki1PpEN|*>tK$$sP`Y53B9&^j`KTONFw2=5W>;<`_ zV&tR^r-=9yOh@;m*clXw$mpy6@7&UPWCF`!Jec14a*7e)BkV3euMF^``cDcQ*jIBG z$)gs^l4mvVYrHd>9p`vy;JvkLoQ`%) z2AK20q^7|ZBmLp@VgS*pP;(O!;KAVb_707GMz??gQLuM4Y8tD>p0Kla<(!=JFZ}fV z5^mWz?D2AHu+86lOl}(gTNi=iDA}7Q3h_p#MF@ z>5K21f-sJHt}5VQ_`FQ?(A5!bRk#tIkE_-&s8bP2VMGu3nd)6u3sIE=h#(-#ta zd?%=kP4x)UF<7Xvn8e9Nmu8KxrA!7dODZuZO~}JfGOFtMw~Z?LYK|Oq&r`j@KEQ=O z&~!>y(;8TBLbwNq~ySFz%#vTyVd`GOLhn~MX;*)gHa=5?KW);K~()_Sbz)0j=)+tTe9X{jYUI7HhZQw~h1}0{2k7qa)2XP^zVU6@H)@ zje{z$x|Y;2c@x$x0HFeCIt2NxRa!Bux$AM$^?8cyPVHMlP>W$yzhlD(qp-ae!TH#C z{mKI*6B9FaQW2?*D1vF5eeaKP@oKegIj=aEL(oQv{O`#-eP5`*{j+5>s58tx;5If5 zswb3Z!P0%CxMV=R@%PWtck>hhZqpn$vvR~lYpbZqdXr_m29xa(C&|Po?>uDg?-XI# zf0O}O5nwr@fI(rDF6epT6t*(`UYyuR#3Y*}rGHfI4ycKFW62Ikwae|U+L&px62 z7jUF@Llj;T?Y;>m)wIf#yqoXq#N#p7?wb*yI9Kwzb4lRtL58v|T3vcg`2sQo_K+-- zegEDOQW0!TrQt%$S&u+^DL@+4?Ds}2GiMcT&d{TREXiSxI~YbhN7-OlNPguCKk@gN#bXZP?|06>x%sn zcgqIccNX}}6X;DBfgw9Bj13|iD0&NuKU<0)Q07ulvAIq@r-XmrZu5DF9LR-70W&Fe z#wQ@&`MuboW>%we#CMhZ-io&O!gv9X)$XScpGV2Pt5F7^?zd&_iFX>lSCQt(n-3&C znOLUM65*&}!-xBDp_uJ^u}Zhz<=Z#3!qJLzxH3ziJO#X+H0Kg9&V(uOp{ANMAwW!-vqxWm7me@=MJkJYpcjSA;4jL$}WH>au_ z2cI5!fK!9|tr30G696v&?m|l`GV*SbdviWf23l^thc#ys8?8OZ2Ye50H_#$X|0A=a zP53EI%q1VfhMC)`F3m}vr-5X-ZF-AzD${(gs;W`5TvYtx8Rj_jMP4o!8tvaW>Gvy4 zm_ka9!E|kVht2yf8|f|Q+w@AR-VDkkQ!6R^8?hkz)=5RF~nEmhYp!_I)-xfj$#PItx>BwpE#k8fRPr!ACk*-^_Ar545;vFc0 z;&wa_Is3cSV(AYr|0$G3iL)-1mw4~i3 
zx6cd9^YS6xe|!*#J_mLnIx0QvmqVDTKupm$1jI%V8q|z?`Q*RiQh!SNg#&VcGpdl zQCf3z9b@_4cG=!bnf~m04gA!o=eYYxrLB)@49x=qJWa1n9)c$K989jkkOQ{ny~Bt# zjeD(v561q1r>uOK+4WLNt=E1uCcMe#I0egy!}rzy2lF-5|4>`W zJbLa?oR6AySm?uh5JLjK7C+<#j>V@Vvh`V&mS+nz@Xo##8!^hW~x?td-9{UAN#vL1u{4F z$vy@xXU^)fqA5w)vjhDWPfBzf2+hhLVgP}+%jVMmO(Ff8S@h#(`GFvjUx?o=1o87{?Q|LDLczlGvcb@BbkwNem+7*0%{{@kjkl1AN2NT4ab#49}bTLoQ9+ja*y`%YoEoQ zphFx*?rYHQ=v=+8!_SiRcD7xZx0^iO0eVis2R0_ph{Vn63Fm-rbgW=ETytN?1GU^w z7|j!ik>_hI)300wLj9I zzdWOO(Z;LxyZrP)+6Up^7?3~$tLJEo|23M%nm?$3w_1*x2PAm47^YyNLk?24qY@Q* z*gj+MY1|~eY`Z#QUf_QQE!G@Chg6RL@j`DmKWP0BTdP&Bdl;mYiPU-Iva{m`fgBu5 zM?sS_@hy}vdcz!@gy2Vd%KwJ zB*()iG7c_j6xxFH{kuqZ3V_nnnh+EwqD>(}1vG1~>34-MyA<3AK7>IaiN(p%4ujy)7$m!IqfcqUBWqSX;E`#Q^_ZJnkLy?K$X^k2DVD8jBM?YsaxPVlNC1EC z#VhXL@DzTZg;Sx@^d$%hfD~e&C)(d6Jgwa4jNuYvCvcgRX&N{_CWHft?CT)wv zo1(QW$4HzeHe;oM3?82z?Q43HnLH-zsnYV%+VBAU$ z#Uwwsee7IGxD@nNZ6e^OJ)M7Ih#0^SVJZlEJGp&RiV;G=!Qcs^OOYEkG5x^#G}@aO z68OoSSJ&~W>MvLQL2+S2O_Iy_R<}wq{u-oU*|ry0H69)ywijj}Qp65VR^5g-_U*lp z5BGP=Qp`BbYj7aVuPvU)tLb2-IOi;+5gmTczpBWBpT{gTd9C`2gQcb5(v^m2|C;x4 zbY5$8cPXK4kkiZ|)Rw|4`1N0Nw@U8AG>PMp3~>qJR*$ssD**_^*!tUd49Lwpi>oH(vkGdfs}j>l z4-(Jwe6uaP=x2JvNjMOFLyISKY?&VOx5l-_#4rBf6qtISHAE~I#Ll+9FGhXMn+M{w zkVTD#jm5#uZT*Fr{@b*{F}sFip(pZ_0tWvIEzuy9%(*HroO;Jsi=IxYCT%@hx!s=f zr1n3E)_d8r1Ed&e6bh7XF=v;@g%Pkn(zIFgI3S&S2cD3%2zc1rMICO3pJP0eCWLTv z<*GbBwKRxg#;;aOIhRal8~c1n`cP+1+||l;AhBLeU;>=q5YN2L-vD)&W5j)aobJFk z2`><@H1a(NYmXl&to|EZJ)Iwjdqv~4oJNR)#^KKTlJ9G)cShkC1+33qDSadb#g4N~ z{z7XZ%ofTLTC>+BTIbu7=-?EpY_H*Knr@0-)|cjK z5xt!Z{>QJ^TRcu}fG3KVuj;<3>)Wum-U)Tz8|r%tiaptSPON%i-^pGG`fbl{Rr}N# zOjZ~HHRM?Un*c&V!8eCknGf%HrY|=Bo{$ncxBRQz!Q!&AyOLM5rP1Wape6N!351K9 z?6YBMfATE#{wlNiE6Y#uN-0}p&>7|4{_Wjq$_UylwiVBKS79|Dj;z#7JL2dbH6g31Jer+-pl*(N-#gGC9M)p) z$zr~-H8s3*?;{+3K0QsC+RmJgPC4Fjt{Bi$njy-nxq{k}%4-$nu36|Xd;B{v^o#tb z!tC{WIU{9~W5tSWkul|?_D2eh9AP}e+$hyz(!59|i+4YGHAJBAEWP{kBT1QarmOC+ zZe^QaM&xAm>qM;ajksOMBClpL9V58_a&>MhLm>6I`@U=fI0Nt_g%xACK~qA*%cc~c zqUBf6TEUk(d*g{E+s5nqVr15B&Fme4QRdxVG7#Y(ME2 
z;gVphk(6Osd9sv7UUIrm7IVg9vAl}R34hs~b6Jn{@0F(4jAxZD?@mQ2@PEQk^Qp5V zI6Vg|(*EXq2iVI&)AsAvlBqfqs(MS!(ZW?7&qhEXa47$(G$n#W^~3kPY%9USVvN4r z@9#KOtwpQC)%u}udQRLpIob_}ae!eVz?5fp_U&;I*Jvv_CZ8K?q>G$H)$mLG;W}B&I zg34Cs(wIi_7KO6t3d{TP?xk**s-?Lv=QryJeLO8;sCvsDq9e(a#;I7)Px>VCX|$is zz!B@&5ZJ%@e|CJJ{MYhqb#k{N_Yw(&Inbswy zde4oqn5{TR(q}LIBPH{&!N*kjhl(`sSl<+*Ed=L~GJzGi)bmu4H*>n$rJlGm(>9T;E4!wPVAm)ax3? zZRTOXGWXL!LDp;|%!?66AMCOJPd|true true true + true true diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs index 52f183514bc..e9322acfc25 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs @@ -260,7 +260,7 @@ public static Task UploadVideoCharacterAsync( _ = Throw.IfNull(generator); _ = Throw.IfNull(videoContent); - OpenAIVideoGenerator openAIGenerator = generator as OpenAIVideoGenerator + OpenAIVideoGenerator openAIGenerator = generator.GetService() ?? 
throw new InvalidOperationException("The video generator is not backed by an OpenAI VideoClient."); return openAIGenerator.UploadVideoCharacterAsync(name, videoContent, cancellationToken); diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs index f6ac7ba470c..d29a1053d05 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs @@ -15,6 +15,7 @@ using System.Threading.Tasks; using Microsoft.Shared.DiagnosticIds; using Microsoft.Shared.Diagnostics; +using Microsoft.Shared.Text; using OpenAI.Videos; namespace Microsoft.Extensions.AI; @@ -159,7 +160,7 @@ request.SourceVideoId is null && if (options?.Duration is TimeSpan extDuration) { - body["seconds"] = (int)extDuration.TotalSeconds; + body["seconds"] = ((int)extDuration.TotalSeconds).ToInvariantString(); } ForwardAdditionalProperties(body, options); @@ -222,7 +223,7 @@ request.SourceVideoId is null && if (options?.Duration is TimeSpan duration) { - requestBody["seconds"] = (int)duration.TotalSeconds; + requestBody["seconds"] = ((int)duration.TotalSeconds).ToInvariantString(); } if (options?.Count is int count && count > 1) @@ -239,22 +240,21 @@ request.SourceVideoId is null && ["image_url"] = imageReferenceUri.Uri.ToString(), }; } - - if (imageReferenceData is not null) - { - using BinaryContent multipartContent = BuildMultipartContent( - requestBody, imageReferenceData, "input_reference", - out string multipartContentType); - createResult = await _videoClient.CreateVideoAsync( - multipartContent, multipartContentType, - reqOpts).ConfigureAwait(false); - } - else + else if (imageReferenceData is not null) { - using var content = CreateJsonContent(requestBody); - createResult = await _videoClient.CreateVideoAsync( - content, "application/json", reqOpts).ConfigureAwait(false); + // The API expects input_reference as a 
JSON object, not a multipart file. + // Convert to a data URI so the image bytes are sent inline. + string mediaType = imageReferenceData.MediaType ?? "application/octet-stream"; + string base64 = Convert.ToBase64String(imageReferenceData.Data.ToArray()); + requestBody["input_reference"] = new JsonObject + { + ["image_url"] = $"data:{mediaType};base64,{base64}", + }; } + + using var content = CreateJsonContent(requestBody); + createResult = await _videoClient.CreateVideoAsync( + content, "application/json", reqOpts).ConfigureAwait(false); } // Parse the creation response to get the video ID and initial status @@ -311,8 +311,26 @@ internal async Task UploadVideoCharacterAsync( await _videoClient.Pipeline.SendAsync(message).ConfigureAwait(false); - using JsonDocument doc = JsonDocument.Parse(message.Response!.Content); - return doc.RootElement.GetProperty("id").GetString()!; + PipelineResponse response = message.Response!; + using JsonDocument doc = JsonDocument.Parse(response.Content); + JsonElement root = doc.RootElement; + + // The API may return an error object with a "message" property. + if (root.TryGetProperty("error", out JsonElement errorElement)) + { + string errorMessage = errorElement.TryGetProperty("message", out JsonElement msgElement) + ? msgElement.GetString() ?? "Unknown error" + : errorElement.ToString(); + throw new InvalidOperationException($"Character upload failed: {errorMessage}"); + } + + if (root.TryGetProperty("id", out JsonElement idElement)) + { + return idElement.GetString()!; + } + + throw new InvalidOperationException( + $"Character upload response did not contain an 'id' property. Response: {response.Content}"); } ///

Creates a for a POST request to a path not yet exposed by the SDK. diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs index bca109c1c96..84598d2613b 100644 --- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs @@ -5,6 +5,13 @@ using System; using System.ClientModel; +using System.ClientModel.Primitives; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; using OpenAI; using OpenAI.Videos; using Xunit; @@ -44,4 +51,183 @@ public void GetService_ReturnsExpectedServices() Assert.NotNull(videoGenerator.GetService()); Assert.NotNull(videoGenerator.GetService()); } + + [Fact] + public async Task GenerateAsync_CreateWithDuration_EmitsSecondsAsString() + { + string? capturedBody = null; + using var handler = new RoutingHandler(request => + { + capturedBody = request.Content!.ReadAsStringAsync().Result; + return CreateVideoResponse("vid_123", "queued"); + }); + using HttpClient httpClient = new(handler); + using var generator = CreateGenerator(httpClient); + + await generator.GenerateAsync( + new VideoGenerationRequest { Prompt = "test" }, + new VideoGenerationOptions { Duration = TimeSpan.FromSeconds(8) }); + + Assert.NotNull(capturedBody); + using var doc = JsonDocument.Parse(capturedBody); + JsonElement root = doc.RootElement; + + // The API requires seconds as a string enum, not an integer + Assert.Equal(JsonValueKind.String, root.GetProperty("seconds").ValueKind); + Assert.Equal("8", root.GetProperty("seconds").GetString()); + } + + [Fact] + public async Task GenerateAsync_ExtendWithDuration_EmitsSecondsAsString() + { + string? 
capturedBody = null; + using var handler = new RoutingHandler(request => + { + capturedBody = request.Content!.ReadAsStringAsync().Result; + return CreateVideoResponse("vid_456", "queued"); + }); + using HttpClient httpClient = new(handler); + using var generator = CreateGenerator(httpClient); + + await generator.GenerateAsync( + new VideoGenerationRequest + { + Prompt = "continue the scene", + OperationKind = VideoOperationKind.Extend, + SourceVideoId = "vid_original", + }, + new VideoGenerationOptions { Duration = TimeSpan.FromSeconds(12) }); + + Assert.NotNull(capturedBody); + using var doc = JsonDocument.Parse(capturedBody); + JsonElement root = doc.RootElement; + + Assert.Equal(JsonValueKind.String, root.GetProperty("seconds").ValueKind); + Assert.Equal("12", root.GetProperty("seconds").GetString()); + } + + [Fact] + public async Task GenerateAsync_ImageReferenceData_SentAsJsonDataUri() + { + string? capturedBody = null; + string? capturedContentType = null; + using var handler = new RoutingHandler(request => + { + capturedContentType = request.Content!.Headers.ContentType?.ToString(); + capturedBody = request.Content!.ReadAsStringAsync().Result; + return CreateVideoResponse("vid_789", "queued"); + }); + using HttpClient httpClient = new(handler); + using var generator = CreateGenerator(httpClient); + + byte[] imageBytes = [0x89, 0x50, 0x4E, 0x47]; // PNG magic bytes + await generator.GenerateAsync( + new VideoGenerationRequest + { + Prompt = "animate this image", + OriginalMedia = [new DataContent(imageBytes, "image/png")], + }); + + // Should be JSON, not multipart + Assert.NotNull(capturedContentType); + Assert.Contains("application/json", capturedContentType); + + Assert.NotNull(capturedBody); + using var doc = JsonDocument.Parse(capturedBody); + JsonElement root = doc.RootElement; + + // input_reference should be a JSON object with image_url as a data URI + Assert.True(root.TryGetProperty("input_reference", out JsonElement inputRef)); + 
Assert.Equal(JsonValueKind.Object, inputRef.ValueKind); + string imageUrl = inputRef.GetProperty("image_url").GetString()!; + Assert.StartsWith("data:image/png;base64,", imageUrl); + } + + [Fact] + public async Task UploadVideoCharacterAsync_ApiError_ThrowsWithMessage() + { + using var handler = new RoutingHandler(_ => + new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent( + """{"error": {"message": "Video too long"}}""", + Encoding.UTF8, + "application/json"), + }); + using HttpClient httpClient = new(handler); + using var generator = CreateGenerator(httpClient); + + var ex = await Assert.ThrowsAsync( + () => generator.UploadVideoCharacterAsync( + "TestChar", + new DataContent(new byte[100], "video/mp4"))); + + Assert.Contains("Video too long", ex.Message); + } + + [Fact] + public async Task UploadVideoCharacterAsync_MissingId_ThrowsWithResponseBody() + { + using var handler = new RoutingHandler(_ => + new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent( + """{"unexpected": "response"}""", + Encoding.UTF8, + "application/json"), + }); + using HttpClient httpClient = new(handler); + using var generator = CreateGenerator(httpClient); + + var ex = await Assert.ThrowsAsync( + () => generator.UploadVideoCharacterAsync( + "TestChar", + new DataContent(new byte[100], "video/mp4"))); + + Assert.Contains("did not contain an 'id' property", ex.Message); + Assert.Contains("unexpected", ex.Message); + } + + [Fact] + public async Task UploadVideoCharacterAsync_Success_ReturnsCharacterId() + { + using var handler = new RoutingHandler(_ => + new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent( + """{"id": "char_abc123", "created_at": 0, "name": "TestChar"}""", + Encoding.UTF8, + "application/json"), + }); + using HttpClient httpClient = new(handler); + using var generator = CreateGenerator(httpClient); + + string id = await generator.UploadVideoCharacterAsync( + "TestChar", + new DataContent(new 
byte[100], "video/mp4")); + + Assert.Equal("char_abc123", id); + } + + private static IVideoGenerator CreateGenerator(HttpClient httpClient) => + new OpenAIClient( + new ApiKeyCredential("test-key"), + new OpenAIClientOptions { Transport = new HttpClientPipelineTransport(httpClient) }) + .GetVideoClient() + .AsIVideoGenerator("sora-2"); + + private static HttpResponseMessage CreateVideoResponse(string id, string status) + { + string json = $$"""{"id": "{{id}}", "status": "{{status}}"}"""; + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(json, Encoding.UTF8, "application/json"), + }; + } + + private sealed class RoutingHandler(Func handler) : HttpMessageHandler + { + protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) => + Task.FromResult(handler(request)); + } } From 69a87d04b060efbd5631e9925a71b50c1dd84428 Mon Sep 17 00:00:00 2001 From: "Eric St. John" Date: Thu, 26 Mar 2026 20:01:52 -0700 Subject: [PATCH 07/10] Add more provider implementations --- .../VideoProviders/GoogleVeo/GoogleVeo.csproj | 16 + .../GoogleVeoVideoGenerationOperation.cs | 144 ++++ .../GoogleVeo/GoogleVeoVideoGenerator.cs | 238 ++++++ samples/VideoProviders/GoogleVeo/Program.cs | 226 ++++++ samples/VideoProviders/GoogleVeo/README.md | 79 ++ samples/VideoProviders/LumaAI/LumaAI.csproj | 16 + .../LumaAI/LumaVideoGenerationOperation.cs | 127 +++ .../LumaAI/LumaVideoGenerator.cs | 211 +++++ samples/VideoProviders/LumaAI/Program.cs | 199 +++++ samples/VideoProviders/LumaAI/README.md | 64 ++ .../MultiProviderPOC/MultiProviderPOC.csproj | 18 + .../MultiProviderPOC/Program.cs | 758 ++++++++++++++++++ .../VideoProviders/MultiProviderPOC/README.md | 70 ++ .../MultiProviderPOC/demo-multi-provider.ps1 | 350 ++++++++ samples/VideoProviders/PROVIDER_COMPARISON.md | 229 ++++++ samples/VideoProviders/Runway/Program.cs | 230 ++++++ samples/VideoProviders/Runway/README.md | 63 ++ samples/VideoProviders/Runway/Runway.csproj | 16 
+ .../Runway/RunwayVideoGenerationOperation.cs | 132 +++ .../Runway/RunwayVideoGenerator.cs | 300 +++++++ .../Video/VideoGenerationOptions.cs | 21 +- .../Video/VideoGenerationRequest.cs | 3 + .../Video/VideoGenerationOptionsTests.cs | 26 +- 23 files changed, 3532 insertions(+), 4 deletions(-) create mode 100644 samples/VideoProviders/GoogleVeo/GoogleVeo.csproj create mode 100644 samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerationOperation.cs create mode 100644 samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerator.cs create mode 100644 samples/VideoProviders/GoogleVeo/Program.cs create mode 100644 samples/VideoProviders/GoogleVeo/README.md create mode 100644 samples/VideoProviders/LumaAI/LumaAI.csproj create mode 100644 samples/VideoProviders/LumaAI/LumaVideoGenerationOperation.cs create mode 100644 samples/VideoProviders/LumaAI/LumaVideoGenerator.cs create mode 100644 samples/VideoProviders/LumaAI/Program.cs create mode 100644 samples/VideoProviders/LumaAI/README.md create mode 100644 samples/VideoProviders/MultiProviderPOC/MultiProviderPOC.csproj create mode 100644 samples/VideoProviders/MultiProviderPOC/Program.cs create mode 100644 samples/VideoProviders/MultiProviderPOC/README.md create mode 100644 samples/VideoProviders/MultiProviderPOC/demo-multi-provider.ps1 create mode 100644 samples/VideoProviders/PROVIDER_COMPARISON.md create mode 100644 samples/VideoProviders/Runway/Program.cs create mode 100644 samples/VideoProviders/Runway/README.md create mode 100644 samples/VideoProviders/Runway/Runway.csproj create mode 100644 samples/VideoProviders/Runway/RunwayVideoGenerationOperation.cs create mode 100644 samples/VideoProviders/Runway/RunwayVideoGenerator.cs diff --git a/samples/VideoProviders/GoogleVeo/GoogleVeo.csproj b/samples/VideoProviders/GoogleVeo/GoogleVeo.csproj new file mode 100644 index 00000000000..552fe62669e --- /dev/null +++ b/samples/VideoProviders/GoogleVeo/GoogleVeo.csproj @@ -0,0 +1,16 @@ + + + + Exe + net10.0 + enable + enable + 
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System.Text.Json;
using Microsoft.Extensions.AI;

namespace GoogleVeo;

/// <summary>
/// Tracks an in-flight Google Veo operation, polling the Gemini operations API.
/// </summary>
/// <remarks>
/// Polling: GET https://generativelanguage.googleapis.com/v1beta/{operationName}?key={apiKey}
/// The response includes "done": true when complete, with "response.generatedVideos" containing results.
/// </remarks>
internal sealed class GoogleVeoVideoGenerationOperation : VideoGenerationOperation
{
    private const string BaseUrl = "https://generativelanguage.googleapis.com/v1beta";

    private readonly HttpClient _httpClient;
    private readonly string _apiKey;
    private readonly List<string> _videoUris = [];
    private string? _status;
    private string? _failureReason;
    private bool _done;

    /// <summary>Initializes the operation wrapper for an already-started Gemini LRO.</summary>
    /// <param name="operationName">The Gemini long-running-operation name returned by generateVideos.</param>
    /// <param name="apiKey">API key used for subsequent polling requests.</param>
    /// <param name="httpClient">Client used for polling and downloads; owned by the generator, not this operation.</param>
    /// <param name="modelId">The model that produced this operation.</param>
    public GoogleVeoVideoGenerationOperation(string operationName, string apiKey, HttpClient httpClient, string modelId)
    {
        OperationId = operationName;
        ModelId = modelId;
        _apiKey = apiKey;
        _httpClient = httpClient;
        _status = "PROCESSING";
    }

    /// <summary>Gets the Gemini operation name used for polling.</summary>
    public override string? OperationId { get; }

    /// <summary>Gets the last observed status: "PROCESSING", "SUCCEEDED", or "FAILED".</summary>
    public override string? Status => _status;

    /// <summary>
    /// Gets the percent complete. Veo doesn't report incremental percent, so this is
    /// 100 only once the operation has SUCCEEDED and null otherwise (including failure —
    /// previously a failed operation incorrectly reported 100%).
    /// </summary>
    public override int? PercentComplete => _status == "SUCCEEDED" ? 100 : null;

    /// <summary>Gets whether the operation has finished, successfully or not.</summary>
    public override bool IsCompleted => _done;

    /// <summary>Gets the error message when the operation failed; null otherwise.</summary>
    public override string? FailureReason => _failureReason;

    /// <summary>Polls the operations endpoint once and refreshes status, failure reason, and result URIs.</summary>
    public override async Task UpdateAsync(CancellationToken cancellationToken = default)
    {
        string url = $"{BaseUrl}/{OperationId}?key={_apiKey}";
        using var response = await _httpClient.GetAsync(url, cancellationToken);
        string body = await response.Content.ReadAsStringAsync(cancellationToken);
        if (!response.IsSuccessStatusCode)
        {
            // Surface the error payload instead of discarding it via EnsureSuccessStatusCode.
            throw new HttpRequestException($"Polling operation '{OperationId}' failed with status {(int)response.StatusCode}: {body}");
        }

        using var doc = JsonDocument.Parse(body);
        var root = doc.RootElement;

        _done = root.TryGetProperty("done", out var doneProp) && doneProp.GetBoolean();

        if (root.TryGetProperty("error", out var error))
        {
            _status = "FAILED";
            _failureReason = error.TryGetProperty("message", out var msg) ? msg.GetString() : "Unknown error";
            _done = true;
        }
        else if (_done)
        {
            _status = "SUCCEEDED";

            // Parse generated videos
            if (root.TryGetProperty("response", out var resp) &&
                resp.TryGetProperty("generatedVideos", out var videos))
            {
                _videoUris.Clear();
                foreach (var video in videos.EnumerateArray())
                {
                    if (video.TryGetProperty("video", out var videoObj) &&
                        videoObj.TryGetProperty("uri", out var uri))
                    {
                        _videoUris.Add(uri.GetString()!);
                    }
                }
            }
        }
        else
        {
            _status = "PROCESSING";
        }
    }

    /// <summary>Polls until completion, reporting progress after each poll; throws on failure.</summary>
    public override async Task WaitForCompletionAsync(
        IProgress<VideoGenerationProgress>? progress = null,
        CancellationToken cancellationToken = default)
    {
        while (!IsCompleted)
        {
            // Veo docs recommend ~10 second polling for video generation
            await Task.Delay(TimeSpan.FromSeconds(10), cancellationToken);
            await UpdateAsync(cancellationToken);
            progress?.Report(new VideoGenerationProgress(_status, PercentComplete));
        }

        if (_status == "FAILED")
        {
            throw new InvalidOperationException($"Video generation failed: {_failureReason}");
        }
    }

    /// <summary>
    /// Returns the generated video contents, either as URIs or downloaded bytes
    /// depending on <see cref="VideoGenerationOptions.ResponseFormat"/>.
    /// </summary>
    public override async Task<IReadOnlyList<AIContent>> GetContentsAsync(
        VideoGenerationOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        if (!IsCompleted || _status == "FAILED")
        {
            throw new InvalidOperationException("The operation has not completed successfully.");
        }

        if (_videoUris.Count == 0)
        {
            await UpdateAsync(cancellationToken);
        }

        if (_videoUris.Count == 0)
        {
            throw new InvalidOperationException("No video URIs available after completion.");
        }

        var results = new List<AIContent>();
        foreach (string videoUri in _videoUris)
        {
            if (options?.ResponseFormat == VideoGenerationResponseFormat.Uri)
            {
                results.Add(new UriContent(new Uri(videoUri), "video/mp4"));
            }
            else
            {
                // NOTE(review): Veo file URIs may require the API key appended for download — confirm against API docs.
                using var response = await _httpClient.GetAsync(videoUri, cancellationToken);
                response.EnsureSuccessStatusCode();
                byte[] data = await response.Content.ReadAsByteArrayAsync(cancellationToken);
                results.Add(new DataContent(data, "video/mp4"));
            }
        }

        return results;
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System.Drawing;
using System.Net.Http.Headers;
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.AI;

namespace GoogleVeo;

/// <summary>
/// Implements <see cref="IVideoGenerator"/> for Google Veo via the Gemini API.
/// Supports text-to-video, image-to-video, reference images, and video extension.
/// </summary>
/// <remarks>
/// API Reference: https://ai.google.dev/gemini-api/docs/video
/// Endpoint: POST https://generativelanguage.googleapis.com/v1beta/models/{model}:generateVideos
/// Polling: GET https://generativelanguage.googleapis.com/v1beta/{operation.name}
/// </remarks>
internal sealed class GoogleVeoVideoGenerator : IVideoGenerator
{
    private const string BaseUrl = "https://generativelanguage.googleapis.com/v1beta";

    private readonly HttpClient _httpClient;
    private readonly bool _ownsHttpClient; // only dispose clients this instance created
    private readonly string _apiKey;
    private readonly string _modelId;

    /// <summary>Initializes the generator.</summary>
    /// <param name="apiKey">Gemini API key.</param>
    /// <param name="modelId">Default Veo model when options don't specify one.</param>
    /// <param name="httpClient">Optional caller-owned client; when null, an internally owned client is created.</param>
    public GoogleVeoVideoGenerator(string apiKey, string modelId = "veo-3.1-generate-preview", HttpClient? httpClient = null)
    {
        _apiKey = apiKey;
        _modelId = modelId;
        _ownsHttpClient = httpClient is null;
        _httpClient = httpClient ?? new HttpClient();
        _httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
    }

    /// <summary>
    /// Starts a Veo video generation and returns an operation to poll for completion.
    /// </summary>
    public async Task<VideoGenerationOperation> GenerateAsync(
        VideoGenerationRequest request,
        VideoGenerationOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        string model = options?.ModelId ?? _modelId;
        var body = new JsonObject();

        // Text prompt (required for most operations)
        if (request.Prompt is not null)
        {
            body["prompt"] = request.Prompt;
        }

        // Image for image-to-video
        if (request.OperationKind == VideoOperationKind.Create && request.OriginalMedia is not null)
        {
            var image = GetFirstImageContent(request.OriginalMedia);
            if (image is not null)
            {
                body["image"] = image;
            }
        }

        // Reference images (provider-specific via AdditionalProperties)
        if (options?.AdditionalProperties?.TryGetValue("referenceImages", out object? refImgs) == true && refImgs is JsonArray refArray)
        {
            // Re-parse so the node isn't attached to two parents.
            body["referenceImages"] = JsonNode.Parse(refArray.ToJsonString())!;
        }

        // Last frame for first+last frame interpolation (guard against a null value in the bag)
        if (options?.AdditionalProperties?.TryGetValue("lastFrameImage", out object? lastFrame) == true && lastFrame is not null)
        {
            body["lastFrame"] = new JsonObject
            {
                ["image"] = BuildImageNode(lastFrame),
            };
        }

        // Generation config — first-class options take precedence over AdditionalProperties fallbacks.
        var config = new JsonObject();

        if (options?.AdditionalProperties?.TryGetValue("personGeneration", out object? personGen) == true && personGen is string personGenStr)
        {
            config["personGeneration"] = personGenStr;
        }

        if (options?.Duration is { } duration)
        {
            // Veo requires durationSeconds as a string (e.g. "4", "6", "8").
            config["durationSeconds"] = ((int)duration.TotalSeconds).ToString();
        }

        if (options?.VideoSize is { } size)
        {
            config["resolution"] = MapResolution(size);
        }

        if (options?.AspectRatio is { } aspectRatio)
        {
            config["aspectRatio"] = aspectRatio;
        }
        else if (options?.AdditionalProperties?.TryGetValue("aspectRatio", out object? ar) == true && ar is string arStr)
        {
            config["aspectRatio"] = arStr;
        }

        if (options?.AdditionalProperties?.TryGetValue("numberOfVideos", out object? numVids) == true && numVids is int numVidsInt)
        {
            config["numberOfVideos"] = numVidsInt;
        }
        else if (options?.Count is { } count)
        {
            config["numberOfVideos"] = count;
        }

        // Negative prompt — prefer first-class property on request, fall back to AdditionalProperties
        string? negativePrompt = request.NegativePrompt;
        if (negativePrompt is null && options?.AdditionalProperties?.TryGetValue("negativePrompt", out object? negPrompt) == true && negPrompt is string negPromptStr)
        {
            negativePrompt = negPromptStr;
        }

        if (negativePrompt is not null)
        {
            config["negativePrompt"] = negativePrompt;
        }

        if (options?.GenerateAudio is bool genAudio)
        {
            config["generateAudio"] = genAudio;
        }
        else if (options?.AdditionalProperties?.TryGetValue("generateAudio", out object? genAudioObj) == true && genAudioObj is bool genAudioBool)
        {
            config["generateAudio"] = genAudioBool;
        }

        if (options?.Seed is int seed)
        {
            config["seed"] = seed;
        }
        else if (options?.AdditionalProperties?.TryGetValue("seed", out object? seedObj) == true && seedObj is int seedInt)
        {
            config["seed"] = seedInt;
        }

        if (config.Count > 0)
        {
            body["generationConfig"] = config;
        }

        // Video extension uses a different field structure
        if (request.OperationKind == VideoOperationKind.Extend && request.SourceVideoId is not null)
        {
            // For extend, the sourceVideoId should be a video file URI or inline data.
            // This is a simplification — real extension requires the Gemini Files API.
            body["extensionSourceVideoId"] = request.SourceVideoId;
        }

        string url = $"{BaseUrl}/models/{model}:generateVideos?key={_apiKey}";
        string json = body.ToJsonString();
        using var content = new StringContent(json, System.Text.Encoding.UTF8, "application/json");
        using var response = await _httpClient.PostAsync(url, content, cancellationToken);

        string responseBody = await response.Content.ReadAsStringAsync(cancellationToken);
        if (!response.IsSuccessStatusCode)
        {
            // Include the API's error payload rather than discarding it.
            throw new HttpRequestException($"generateVideos failed with status {(int)response.StatusCode}: {responseBody}");
        }

        // Dispose the JsonDocument (was leaked previously).
        using var result = JsonDocument.Parse(responseBody);
        string operationName = result.RootElement.GetProperty("name").GetString()!;

        return new GoogleVeoVideoGenerationOperation(operationName, _apiKey, _httpClient, model);
    }

    /// <summary>Standard MEAI service lookup: returns this instance for matching types.</summary>
    public object? GetService(Type serviceType, object? serviceKey = null)
    {
        if (serviceKey is null && serviceType.IsInstanceOfType(this))
        {
            return this;
        }

        return null;
    }

    /// <summary>
    /// Disposes the HttpClient only when it was created internally. A caller-supplied
    /// client stays alive — it may still be in use by outstanding operations.
    /// </summary>
    public void Dispose()
    {
        if (_ownsHttpClient)
        {
            _httpClient.Dispose();
        }
    }

    /// <summary>Finds the first image item in the media list and converts it to a Veo image node.</summary>
    private static JsonNode? GetFirstImageContent(IEnumerable<AIContent> media)
    {
        foreach (var item in media)
        {
            if (item is DataContent dc && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? false) && dc.Data.Length > 0)
            {
                return new JsonObject
                {
                    ["imageBytes"] = Convert.ToBase64String(dc.Data.ToArray()),
                    ["mimeType"] = dc.MediaType,
                };
            }

            if (item is UriContent uc && uc.Uri is not null)
            {
                return new JsonObject
                {
                    ["imageUri"] = uc.Uri.ToString(),
                };
            }
        }

        return null;
    }

    /// <summary>Builds a Veo image node from a file path, a pre-built JsonNode, or a URI-like value.</summary>
    private static JsonNode BuildImageNode(object imageData)
    {
        if (imageData is string path && File.Exists(path))
        {
            byte[] bytes = File.ReadAllBytes(path);
            return new JsonObject
            {
                ["imageBytes"] = Convert.ToBase64String(bytes),
                // assumes PNG for file paths — TODO confirm; callers currently pass PNG frames
                ["mimeType"] = "image/png",
            };
        }

        if (imageData is JsonNode node)
        {
            return node;
        }

        return new JsonObject { ["imageUri"] = imageData.ToString() };
    }

    /// <summary>Maps pixel dimensions onto Veo's named resolution tiers (lossy by design).</summary>
    private static string MapResolution(Size size)
    {
        int maxDim = Math.Max(size.Width, size.Height);
        return maxDim switch
        {
            <= 720 => "720p",
            <= 1080 => "1080p",
            _ => "4k",
        };
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

// Google Veo (Gemini API) Video Generation Sample
//
// Usage examples:
//   dotnet run -- generate "A cinematic drone shot of a coastline at sunset"
//   dotnet run -- generate "A character walks through" --image reference.jpg
//   dotnet run -- generate "Smooth transition" --image first.jpg --last-frame last.jpg
//
// Environment:
//   GOOGLE_API_KEY — your Gemini API key from https://aistudio.google.com/apikey

using System.CommandLine;
using System.Text.Json.Nodes;
using GoogleVeo;
using Microsoft.Extensions.AI;

// Shared options for the generate command.
var modelOption = new Option<string>("--model", () => "veo-3.1-generate-preview",
    "Model: veo-3.1-generate-preview, veo-3.1-fast-preview, veo-3, veo-2.");
var outputOption = new Option<string?>("--output", "Output file path (.mp4).");
var durationOption = new Option<int?>("--duration", "Duration in seconds (4, 6, or 8).");
var resolutionOption = new Option<string>("--resolution", () => "720p", "Resolution: 720p, 1080p, 4k.");
var aspectRatioOption = new Option<string?>("--aspect-ratio", "Aspect ratio (e.g. 16:9, 9:16).");
var formatOption = new Option<string>("--format", () => "data", "Response format: data or uri.");
var countOption = new Option<int>("--count", () => 1, "Number of videos to generate.");
var negativePromptOption = new Option<string?>("--negative-prompt", "What to avoid in the video.");
var audioOption = new Option<bool>("--audio", () => false, "Generate audio (Veo 3+ only).");
var seedOption = new Option<int?>("--seed", "Seed for reproducibility.");
var personGenOption = new Option<string?>("--person-generation", "Person generation policy: dont_allow, allow_adult.");

// ── generate ────────────────────────────────────────────────────────────────
var promptArg = new Argument<string>("prompt", "Text prompt.");
var imageOption = new Option<string?>("--image", "Input image for image-to-video.");
var lastFrameOption = new Option<string?>("--last-frame", "Last frame image for interpolation.");
var refImagesOption = new Option<string[]>("--ref-image", "Reference image(s) for style guidance (up to 3).") { AllowMultipleArgumentsPerToken = true };
var refTypeOption = new Option<string>("--ref-type", () => "style", "Reference type: style or subject.");

var generateCommand = new Command("generate", "Generate video from text, image-to-video, or frame interpolation.")
{
    promptArg, modelOption, outputOption, imageOption, lastFrameOption,
    refImagesOption, refTypeOption,
    durationOption, resolutionOption, aspectRatioOption, countOption,
    negativePromptOption, audioOption, seedOption, personGenOption, formatOption,
};

generateCommand.SetHandler(async (context) =>
{
    string prompt = context.ParseResult.GetValueForArgument(promptArg);
    string model = context.ParseResult.GetValueForOption(modelOption)!;
    string? outputPath = context.ParseResult.GetValueForOption(outputOption);
    string? imagePath = context.ParseResult.GetValueForOption(imageOption);
    string? lastFrame = context.ParseResult.GetValueForOption(lastFrameOption);
    string[]? refImages = context.ParseResult.GetValueForOption(refImagesOption);
    string refType = context.ParseResult.GetValueForOption(refTypeOption)!;
    int? duration = context.ParseResult.GetValueForOption(durationOption);
    string resolution = context.ParseResult.GetValueForOption(resolutionOption)!;
    string? aspectRatio = context.ParseResult.GetValueForOption(aspectRatioOption);
    int count = context.ParseResult.GetValueForOption(countOption);
    string? negativePrompt = context.ParseResult.GetValueForOption(negativePromptOption);
    bool audio = context.ParseResult.GetValueForOption(audioOption);
    int? seed = context.ParseResult.GetValueForOption(seedOption);
    string? personGen = context.ParseResult.GetValueForOption(personGenOption);
    string format = context.ParseResult.GetValueForOption(formatOption)!;

    using var generator = CreateGenerator(model);

    // Image-to-video: pass the input image as original media.
    List<AIContent>? media = null;
    if (imagePath is not null)
    {
        media = [await DataContent.LoadFromAsync(imagePath)];
    }

    var options = new VideoGenerationOptions
    {
        ModelId = model,
        Count = count,
        ResponseFormat = string.Equals(format, "uri", StringComparison.OrdinalIgnoreCase)
            ? VideoGenerationResponseFormat.Uri
            : VideoGenerationResponseFormat.Data,
        AdditionalProperties = [],
    };

    if (duration.HasValue)
    {
        options.Duration = TimeSpan.FromSeconds(duration.Value);
    }

    // Map Veo's named tiers onto pixel dimensions for the VideoSize abstraction.
    options.VideoSize = resolution switch
    {
        "1080p" => new System.Drawing.Size(1920, 1080),
        "4k" => new System.Drawing.Size(3840, 2160),
        _ => new System.Drawing.Size(1280, 720),
    };

    if (aspectRatio is not null)
    {
        options.AspectRatio = aspectRatio;
    }

    if (audio)
    {
        options.GenerateAudio = true;
    }

    if (seed.HasValue)
    {
        options.Seed = seed.Value;
    }

    // Provider-specific safety policy travels via AdditionalProperties.
    if (personGen is not null)
    {
        options.AdditionalProperties["personGeneration"] = personGen;
    }

    // Last frame for interpolation
    if (lastFrame is not null)
    {
        byte[] lastFrameBytes = await File.ReadAllBytesAsync(lastFrame);
        var lastFrameNode = new JsonObject
        {
            ["imageBytes"] = Convert.ToBase64String(lastFrameBytes),
            ["mimeType"] = "image/png",
        };
        options.AdditionalProperties["lastFrameImage"] = lastFrameNode;
    }

    // Reference images (up to 3, per Veo docs)
    if (refImages is { Length: > 0 })
    {
        var refs = new JsonArray();
        foreach (string refImg in refImages)
        {
            byte[] refBytes = await File.ReadAllBytesAsync(refImg);
            refs.Add(new JsonObject
            {
                ["referenceImage"] = new JsonObject
                {
                    ["imageBytes"] = Convert.ToBase64String(refBytes),
                    ["mimeType"] = "image/png",
                },
                ["referenceType"] = refType.ToUpperInvariant(),
            });
        }

        options.AdditionalProperties["referenceImages"] = refs;
    }

    var request = new VideoGenerationRequest(prompt, media);

    if (negativePrompt is not null)
    {
        request.NegativePrompt = negativePrompt;
    }

    var operation = await generator.GenerateAsync(request, options);
    await CompleteAndSaveAsync(operation, options, outputPath);
});

// ── Root ────────────────────────────────────────────────────────────────────
var rootCommand = new RootCommand("Google Veo (Gemini API) video generation sample using MEAI IVideoGenerator.")
{
    generateCommand,
};

return await rootCommand.InvokeAsync(args);

// ═══════════════════════════════════════════════════════════════════════════
// Helpers
// ═══════════════════════════════════════════════════════════════════════════

// Builds a generator from the GOOGLE_API_KEY environment variable; exits the process when absent.
static IVideoGenerator CreateGenerator(string model)
{
    string? apiKey = Environment.GetEnvironmentVariable("GOOGLE_API_KEY");
    if (string.IsNullOrEmpty(apiKey))
    {
        Console.Error.WriteLine("Error: Set the GOOGLE_API_KEY environment variable.");
        Console.Error.WriteLine("Get a key at https://aistudio.google.com/apikey");
        Environment.Exit(1);
    }

    return new GoogleVeoVideoGenerator(apiKey!, model);
}

// Waits for the operation to finish, then prints each content item and saves
// DataContent items to disk when an output path was supplied.
static async Task CompleteAndSaveAsync(VideoGenerationOperation operation, VideoGenerationOptions options, string? outputPath)
{
    Console.WriteLine($"OPERATION: {operation.OperationId}");
    Console.WriteLine($"  Status: {operation.Status}");

    var sw = System.Diagnostics.Stopwatch.StartNew();
    await operation.WaitForCompletionAsync(
        new Progress<VideoGenerationProgress>(p =>
            Console.WriteLine($"  Progress: {p.Status}{(p.PercentComplete.HasValue ? $" ({p.PercentComplete}%)" : "")}")));

    sw.Stop();
    Console.WriteLine($"  Completed in {sw.Elapsed.TotalSeconds:F1}s");

    var contents = await operation.GetContentsAsync(options);
    Console.WriteLine($"  {contents.Count} content item(s)");

    for (int i = 0; i < contents.Count; i++)
    {
        // Compute a per-item path only when an output path was given; this replaces
        // the previous null-forgiving 'outputPath!' which produced a null savePath.
        string? savePath = outputPath is null
            ? null
            : contents.Count > 1
                ? Path.Combine(
                    Path.GetDirectoryName(outputPath) ?? ".",
                    $"{Path.GetFileNameWithoutExtension(outputPath)}_{i}{Path.GetExtension(outputPath)}")
                : outputPath;

        switch (contents[i])
        {
            case DataContent dc when savePath is not null:
                Directory.CreateDirectory(Path.GetDirectoryName(savePath) ?? ".");
                await dc.SaveToAsync(savePath);
                Console.WriteLine($"  [{i}] Saved: {savePath} ({dc.Data.Length} bytes)");
                break;
            case DataContent dc:
                Console.WriteLine($"  [{i}] DataContent: {dc.Data.Length} bytes ({dc.MediaType})");
                break;
            case UriContent uc:
                Console.WriteLine($"  [{i}] URI: {uc.Uri}");
                break;
            default:
                Console.WriteLine($"  [{i}] {contents[i].GetType().Name}");
                break;
        }
    }
}
+``` + +## Models + +| Model | ID | Features | +|---|---|---| +| Veo 3.1 | `veo-3.1-generate-preview` | Text/image-to-video, extension, refer images, interpolation, 720p-4k, audio | +| Veo 3.1 Fast | `veo-3.1-fast-preview` | Same features, faster generation, lower quality | +| Veo 3 | `veo-3` | Text-to-video with native audio, 720p-1080p | +| Veo 2 | `veo-2` | Text/image-to-video, 720p-4k | + +## Supported Operations + +| Operation | MEAI Mapping | Notes | +|---|---|---| +| Text-to-video | `VideoOperationKind.Create`, no `OriginalMedia` | Prompt-only generation | +| Image-to-video | `VideoOperationKind.Create` + `OriginalMedia` (image) | Image as starting reference | +| First+last frame interpolation | `OriginalMedia` + `AdditionalProperties["lastFrameImage"]` | Generate video between two frames | +| Reference images (up to 3) | `AdditionalProperties["referenceImages"]` | Style/subject transfer with `reference_type` | +| Video extension | `VideoOperationKind.Extend` | Extend up to 20 times (7s each, 720p only) | +| Multiple outputs | `VideoGenerationOptions.Count` | Generate 1-4 videos from one request | + +## Usage + +```bash +# Text-to-video +dotnet run -- generate "A cinematic drone shot of a coastline at sunset" --output sunset.mp4 + +# Image-to-video +dotnet run -- generate "The scene comes alive" --image photo.jpg --output scene.mp4 + +# First+last frame interpolation +dotnet run -- generate "Smooth transition between frames" --image first.jpg --last-frame last.jpg --output interp.mp4 + +# Reference images for style +dotnet run -- generate "A character walking" --ref-image style1.png --ref-image style2.png --ref-type style + +# With audio (Veo 3+) +dotnet run -- generate "A thunderstorm over a city" --model veo-3 --audio --output storm.mp4 + +# High resolution, specific duration +dotnet run -- generate "A serene lake" --resolution 4k --duration 8 --output lake.mp4 + +# With negative prompt +dotnet run -- generate "A person walking" --negative-prompt "blurry, 
distorted" --person-generation allow_adult + +# Multiple outputs +dotnet run -- generate "A sunset" --count 4 --output sunset.mp4 +``` + +## API Gaps / Limitations + +- **Reference images with typed purpose**: Veo supports `referenceImages` with `referenceType` ("REFERENCE_TYPE_STYLE" or "REFERENCE_TYPE_SUBJECT"), allowing up to 3 images for style/subject transfer. MEAI's `OriginalMedia` doesn't distinguish between "input image for image-to-video" and "reference image for style transfer". +- **First/last frame interpolation**: Veo generates a video between two keyframe images. MEAI has no concept of a "last frame" — this requires `AdditionalProperties`. +- **Native audio generation**: Veo 3+ can generate synchronized audio with video. MEAI has no audio-related option. +- **Negative prompts**: Veo supports `negativePrompt` to exclude unwanted elements. Not part of the core MEAI options. +- **Resolution as named tier**: Veo uses `"720p"`, `"1080p"`, `"4k"` — not pixel dimensions. The `VideoSize` abstraction works but the mapping is lossy. +- **Aspect ratio as string**: Veo uses `"16:9"`, `"9:16"` etc. `VideoSize` can encode this but it's different from the ratio concept each provider uses. +- **Duration as string**: Veo requires `durationSeconds` as a string (`"4"`, `"6"`, `"8"`). The `TimeSpan Duration` maps fine but the valid values are model-specific. +- **Person generation policy**: Veo has `personGeneration` (`"dont_allow"`, `"allow_adult"`) — a safety control with no MEAI equivalent. +- **Seed**: Reproducibility parameter not part of core MEAI options. +- **Video extension**: Extension works by passing frames from previous videos. The Veo API requires using the Gemini Files API to upload the source video first, making `SourceVideoId` insufficient as a simple string ID. +- **Operation polling model**: Veo returns a Gemini LRO (Long Running Operation) with `operations.get()`. The `VideoGenerationOperation.UpdateAsync()` pattern maps well to this. 
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System.Text.Json;
using Microsoft.Extensions.AI;

namespace LumaAI;

/// <summary>
/// Tracks an in-flight Luma AI video generation, polling GET /v1/generations/{id} for status.
/// </summary>
internal sealed class LumaVideoGenerationOperation : VideoGenerationOperation
{
    private const string BaseUrl = "https://api.lumalabs.ai/dream-machine/v1";

    private readonly HttpClient _httpClient;
    private string? _status;
    private string? _failureReason;
    private string? _videoUrl;

    /// <summary>Initializes the wrapper for an already-started Luma generation.</summary>
    /// <param name="operationId">The Luma generation id.</param>
    /// <param name="httpClient">Client used for polling and downloads; owned by the generator.</param>
    /// <param name="modelId">The model that produced this generation.</param>
    public LumaVideoGenerationOperation(string operationId, HttpClient httpClient, string modelId)
    {
        OperationId = operationId;
        ModelId = modelId;
        _httpClient = httpClient;
        _status = "queued";
    }

    /// <summary>Gets the Luma generation id used for polling.</summary>
    public override string? OperationId { get; }

    /// <summary>Gets the last observed Luma state: "queued", "dreaming", "completed", or "failed".</summary>
    public override string? Status => _status;

    /// <summary>
    /// Gets a rough percent complete. Luma reports only discrete states, so the
    /// mapping is coarse: 0 while queued, 50 while in progress, 100 when done, null on failure.
    /// </summary>
    public override int? PercentComplete => _status switch
    {
        "completed" => 100,
        "failed" => null,
        "dreaming" => 50, // Luma uses "dreaming" for in-progress
        _ => 0,
    };

    /// <summary>Gets whether the generation has finished, successfully or not.</summary>
    public override bool IsCompleted => _status is "completed" or "failed";

    /// <summary>Gets the failure reason reported by Luma, when available.</summary>
    public override string? FailureReason => _failureReason;

    /// <summary>Polls the generation endpoint once and refreshes state, failure reason, and the video URL.</summary>
    public override async Task UpdateAsync(CancellationToken cancellationToken = default)
    {
        using var response = await _httpClient.GetAsync($"{BaseUrl}/generations/{OperationId}", cancellationToken);
        string body = await response.Content.ReadAsStringAsync(cancellationToken);
        if (!response.IsSuccessStatusCode)
        {
            // Surface the error payload instead of discarding it via EnsureSuccessStatusCode.
            throw new HttpRequestException($"Polling generation '{OperationId}' failed with status {(int)response.StatusCode}: {body}");
        }

        using var doc = JsonDocument.Parse(body);
        var root = doc.RootElement;

        _status = root.GetProperty("state").GetString();
        if (root.TryGetProperty("failure_reason", out var fr) && fr.ValueKind == JsonValueKind.String)
        {
            _failureReason = fr.GetString();
        }

        if (root.TryGetProperty("assets", out var assets) &&
            assets.TryGetProperty("video", out var video) &&
            video.ValueKind == JsonValueKind.String)
        {
            _videoUrl = video.GetString();
        }
    }

    /// <summary>Polls every 5 seconds until completion, reporting progress; throws on failure.</summary>
    public override async Task WaitForCompletionAsync(
        IProgress<VideoGenerationProgress>? progress = null,
        CancellationToken cancellationToken = default)
    {
        while (!IsCompleted)
        {
            await Task.Delay(TimeSpan.FromSeconds(5), cancellationToken);
            await UpdateAsync(cancellationToken);
            progress?.Report(new VideoGenerationProgress(_status, PercentComplete));
        }

        if (_status == "failed")
        {
            throw new InvalidOperationException($"Video generation failed: {_failureReason}");
        }
    }

    /// <summary>
    /// Returns the generated video as a URI or downloaded bytes depending on
    /// <see cref="VideoGenerationOptions.ResponseFormat"/>.
    /// </summary>
    public override async Task<IReadOnlyList<AIContent>> GetContentsAsync(
        VideoGenerationOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        if (!IsCompleted || _status == "failed")
        {
            throw new InvalidOperationException("The operation has not completed successfully.");
        }

        if (_videoUrl is null)
        {
            // Re-fetch to get the video URL
            await UpdateAsync(cancellationToken);
        }

        if (_videoUrl is null)
        {
            throw new InvalidOperationException("No video URL available after completion.");
        }

        if (options?.ResponseFormat == VideoGenerationResponseFormat.Uri)
        {
            return [new UriContent(new Uri(_videoUrl), "video/mp4")];
        }

        // Download the video data
        using var response = await _httpClient.GetAsync(_videoUrl, cancellationToken);
        response.EnsureSuccessStatusCode();
        byte[] data = await response.Content.ReadAsByteArrayAsync(cancellationToken);
        return [new DataContent(data, "video/mp4")];
    }

    /// <summary>
    /// Builds an Extend request that references this completed generation.
    /// Luma extend uses keyframes with type=generation, id=&lt;generation id&gt;.
    /// </summary>
    public override VideoGenerationRequest CreateExtensionRequest(string? prompt = null)
    {
        return new VideoGenerationRequest
        {
            Prompt = prompt,
            SourceVideoId = OperationId,
            OperationKind = VideoOperationKind.Extend,
        };
    }
}
/// </summary>
/// <remarks>
/// API Reference: https://docs.lumalabs.ai/docs/video-generation
/// Endpoint: https://api.lumalabs.ai/dream-machine/v1/generations
/// </remarks>
internal sealed class LumaVideoGenerator : IVideoGenerator
{
    private const string BaseUrl = "https://api.lumalabs.ai/dream-machine/v1";
    private readonly HttpClient _httpClient;
    private readonly string _modelId;

    // True when this instance created the HttpClient and therefore owns its lifetime.
    private readonly bool _ownsHttpClient;

    public LumaVideoGenerator(string apiKey, string modelId = "ray-2", HttpClient? httpClient = null)
    {
        _modelId = modelId;
        _ownsHttpClient = httpClient is null;
        _httpClient = httpClient ?? new HttpClient();
        _httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", apiKey);
        _httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
    }

    /// <summary>
    /// Starts a Luma generation (POST /generations) and returns an operation that can
    /// be polled for completion. Maps MEAI options onto Luma's request schema.
    /// </summary>
    public async Task<VideoGenerationOperation> GenerateAsync(
        VideoGenerationRequest request,
        VideoGenerationOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        string model = options?.ModelId ?? _modelId;
        var body = new JsonObject { ["prompt"] = request.Prompt, ["model"] = model };

        // Duration: Luma expects a string like "5s".
        if (options?.Duration is { } duration)
        {
            body["duration"] = $"{(int)duration.TotalSeconds}s";
        }

        // Resolution
        if (options?.VideoSize is { } size)
        {
            body["resolution"] = MapResolution(size);
        }

        // Aspect ratio — prefer the first-class property, fall back to AdditionalProperties.
        string? aspectRatio = options?.AspectRatio;
        if (aspectRatio is null && options?.AdditionalProperties?.TryGetValue("aspect_ratio", out object? ar) == true && ar is string arStr)
        {
            aspectRatio = arStr;
        }

        if (aspectRatio is not null)
        {
            body["aspect_ratio"] = aspectRatio;
        }

        // Loop
        if (options?.AdditionalProperties?.TryGetValue("loop", out object? loop) == true && loop is bool loopBool)
        {
            body["loop"] = loopBool;
        }

        // Concepts (provider-specific array, passed through verbatim).
        if (options?.AdditionalProperties?.TryGetValue("concepts", out object? concepts) == true && concepts is JsonArray conceptsArray)
        {
            // Round-trip through text to detach the nodes from their current parent.
            body["concepts"] = JsonNode.Parse(conceptsArray.ToJsonString())!;
        }

        // Callback URL
        if (options?.AdditionalProperties?.TryGetValue("callback_url", out object? cbUrl) == true && cbUrl is string cbUrlStr)
        {
            body["callback_url"] = cbUrlStr;
        }

        // Build keyframes based on the operation kind.
        var keyframes = new JsonObject();

        switch (request.OperationKind)
        {
            case VideoOperationKind.Create:
                // Image-to-video: use original media as keyframes (frame0 / frame1).
                if (request.OriginalMedia is not null)
                {
                    AddImageKeyframes(keyframes, request.OriginalMedia);
                }

                break;

            case VideoOperationKind.Extend:
                // Extend: reference the completed generation as frame0.
                if (request.SourceVideoId is not null)
                {
                    keyframes["frame0"] = new JsonObject
                    {
                        ["type"] = "generation",
                        ["id"] = request.SourceVideoId,
                    };
                }

                break;

            case VideoOperationKind.Edit:
                // Luma doesn't have a direct "edit" endpoint — map to video-to-video via keyframes.
                if (request.SourceVideoId is not null)
                {
                    keyframes["frame0"] = new JsonObject
                    {
                        ["type"] = "generation",
                        ["id"] = request.SourceVideoId,
                    };
                }

                break;
        }

        if (keyframes.Count > 0)
        {
            body["keyframes"] = keyframes;
        }

        string json = body.ToJsonString();
        using var content = new StringContent(json, System.Text.Encoding.UTF8, "application/json");
        using var response = await _httpClient.PostAsync($"{BaseUrl}/generations", content, cancellationToken);

        string responseBody = await response.Content.ReadAsStringAsync(cancellationToken);
        response.EnsureSuccessStatusCode();

        using var result = JsonDocument.Parse(responseBody);
        string operationId = result.RootElement.GetProperty("id").GetString()!;

        return new LumaVideoGenerationOperation(operationId, _httpClient, model);
    }

    public object? GetService(Type serviceType, object? serviceKey = null)
    {
        if (serviceKey is null && serviceType.IsInstanceOfType(this))
        {
            return this;
        }

        return null;
    }

    // Only dispose the HttpClient when we created it; a caller-supplied client may
    // be shared and must outlive this generator.
    public void Dispose()
    {
        if (_ownsHttpClient)
        {
            _httpClient.Dispose();
        }
    }

    /// <summary>
    /// Adds up to two image keyframes (frame0, frame1) from <paramref name="media"/>.
    /// URL-based images are forwarded as-is; byte-based images fall back to a data URI.
    /// </summary>
    private static void AddImageKeyframes(JsonObject keyframes, IEnumerable<AIContent> media)
    {
        int index = 0;
        foreach (var item in media)
        {
            string frameKey = index == 0 ? "frame0" : "frame1";

            // NOTE: checking UriContent first — the previous `is not DataContent => continue`
            // filter unreachably skipped every UriContent item, dropping URL-based images.
            if (item is UriContent uc)
            {
                // URL-based image: Luma requires HTTPS URLs.
                keyframes[frameKey] = new JsonObject
                {
                    ["type"] = "image",
                    ["url"] = uc.Uri.ToString(),
                };
            }
            else if (item is DataContent dc && dc.Data.Length > 0)
            {
                // Luma only accepts HTTPS URLs for images, not data URIs.
                // (Limitation: callers must upload images to a CDN first.)
                string dataUri = dc.Uri ?? $"data:{dc.MediaType ?? "image/png"};base64,{Convert.ToBase64String(dc.Data.ToArray())}";
                keyframes[frameKey] = new JsonObject
                {
                    ["type"] = "image",
                    ["url"] = dataUri,
                };
            }
            else
            {
                // Unsupported content type or empty payload — don't consume a frame slot.
                continue;
            }

            index++;
            if (index >= 2)
            {
                break; // Luma supports max 2 keyframes (frame0 + frame1)
            }
        }
    }

    /// <summary>Maps a pixel size onto Luma's named resolution tiers by its larger dimension.</summary>
    private static string MapResolution(Size size)
    {
        int maxDim = Math.Max(size.Width, size.Height);
        return maxDim switch
        {
            <= 540 => "540p",
            <= 720 => "720p",
            <= 1080 => "1080p",
            _ => "4k",
        };
    }
}

// ---------------------------------------------------------------------------
// File: samples/VideoProviders/LumaAI/Program.cs
// ---------------------------------------------------------------------------

// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// Luma AI (Dream Machine) Video Generation Sample
//
// Usage examples:
//   dotnet run -- generate "A tiger walking through snow"
//   dotnet run -- generate "The scene continues" --image start-frame.jpg
//   dotnet run -- extend "The tiger starts running" --video <generation-id>
//
// Environment:
//   LUMA_API_KEY — your Luma API key from https://lumalabs.ai/dream-machine/api/keys

using System.CommandLine;
using LumaAI;
using Microsoft.Extensions.AI;

var modelOption = new Option<string>("--model", () => "ray-2", "Model ID (ray-2 or ray-flash-2).");
var outputOption = new Option<string?>("--output", "Output file path (.mp4).");
var durationOption = new Option<string?>("--duration", "Duration string (e.g. '5s').");
var resolutionOption = new Option<string>("--resolution", () => "720p", "Resolution: 540p, 720p, 1080p, 4k.");
var aspectRatioOption = new Option<string?>("--aspect-ratio", "Aspect ratio (e.g. 16:9).");
var loopOption = new Option<bool>("--loop", () => false, "Whether to loop the video.");
var formatOption = new Option<string>("--format", () => "data", "Response format: data or uri.");

// ── generate ────────────────────────────────────────────────────────────────
var generatePromptArg = new Argument<string>("prompt", "Text prompt.");
var imageOption = new Option<string?>("--image", "Input image file for image-to-video (HTTPS URL or local file).");
var endImageOption = new Option<string?>("--end-image", "End frame image for keyframe interpolation.");

var generateCommand = new Command("generate", "Generate a video from text or image.")
{
    generatePromptArg, modelOption, outputOption, imageOption, endImageOption,
    durationOption, resolutionOption, aspectRatioOption, loopOption, formatOption,
};

generateCommand.SetHandler(async (context) =>
{
    string prompt = context.ParseResult.GetValueForArgument(generatePromptArg);
    string model = context.ParseResult.GetValueForOption(modelOption)!;
    string? outputPath = context.ParseResult.GetValueForOption(outputOption);
    string? imagePath = context.ParseResult.GetValueForOption(imageOption);
    string? endImagePath = context.ParseResult.GetValueForOption(endImageOption);
    string? duration = context.ParseResult.GetValueForOption(durationOption);
    string resolution = context.ParseResult.GetValueForOption(resolutionOption)!;
    string? aspectRatio = context.ParseResult.GetValueForOption(aspectRatioOption);
    bool loop = context.ParseResult.GetValueForOption(loopOption);
    string format = context.ParseResult.GetValueForOption(formatOption)!;

    using var generator = CreateGenerator(model);

    // Optional start (and end) frame images become keyframes on the request.
    List<AIContent>? media = null;
    if (imagePath is not null)
    {
        media = [await DataContent.LoadFromAsync(imagePath)];
        if (endImagePath is not null)
        {
            media.Add(await DataContent.LoadFromAsync(endImagePath));
        }
    }

    var options = BuildOptions(duration, resolution, aspectRatio, loop, format);
    var request = new VideoGenerationRequest(prompt, media);
    var operation = await generator.GenerateAsync(request, options);
    await CompleteAndSaveAsync(operation, options, outputPath);
});

// ── extend ──────────────────────────────────────────────────────────────────
var extendPromptArg = new Argument<string>("prompt", "Prompt for the extension.");
var extendVideoOption = new Option<string>("--video", "Generation ID to extend.") { IsRequired = true };

var extendCommand = new Command("extend", "Extend a completed video generation.")
{
    extendPromptArg, extendVideoOption, modelOption, outputOption, formatOption,
};

extendCommand.SetHandler(async (context) =>
{
    string prompt = context.ParseResult.GetValueForArgument(extendPromptArg);
    string videoId = context.ParseResult.GetValueForOption(extendVideoOption)!;
    string model = context.ParseResult.GetValueForOption(modelOption)!;
    string? outputPath = context.ParseResult.GetValueForOption(outputOption);
    string format = context.ParseResult.GetValueForOption(formatOption)!;

    using var generator = CreateGenerator(model);
    var options = BuildOptions(null, "720p", null, false, format);
    var request = new VideoGenerationRequest(prompt)
    {
        OperationKind = VideoOperationKind.Extend,
        SourceVideoId = videoId,
    };

    var operation = await generator.GenerateAsync(request, options);
    await CompleteAndSaveAsync(operation, options, outputPath);
});

// ── Root ────────────────────────────────────────────────────────────────────
var rootCommand = new RootCommand("Luma AI (Dream Machine) video generation sample using MEAI IVideoGenerator.")
{
    generateCommand,
    extendCommand,
};

return await rootCommand.InvokeAsync(args);

// ═══════════════════════════════════════════════════════════════════════════
// Helpers
// ═══════════════════════════════════════════════════════════════════════════

// Creates a LumaVideoGenerator, exiting with a friendly message when no API key is set.
static IVideoGenerator CreateGenerator(string model)
{
    string? apiKey = Environment.GetEnvironmentVariable("LUMA_API_KEY");
    if (string.IsNullOrEmpty(apiKey))
    {
        Console.Error.WriteLine("Error: Set the LUMA_API_KEY environment variable.");
        Console.Error.WriteLine("Get a key at https://lumalabs.ai/dream-machine/api/keys");
        Environment.Exit(1);
    }

    return new LumaVideoGenerator(apiKey, model);
}

// Translates CLI strings into VideoGenerationOptions.
static VideoGenerationOptions BuildOptions(string? duration, string resolution, string? aspectRatio, bool loop, string format)
{
    var options = new VideoGenerationOptions
    {
        ResponseFormat = string.Equals(format, "uri", StringComparison.OrdinalIgnoreCase)
            ? VideoGenerationResponseFormat.Uri
            : VideoGenerationResponseFormat.Data,
        AdditionalProperties = [],
    };

    if (duration is not null)
    {
        // Try to parse "5s" → 5 seconds
        if (int.TryParse(duration.TrimEnd('s'), out int secs))
        {
            options.Duration = TimeSpan.FromSeconds(secs);
        }
    }

    if (aspectRatio is not null)
    {
        options.AspectRatio = aspectRatio;
    }

    if (loop)
    {
        options.AdditionalProperties["loop"] = true;
    }

    // Map resolution string to VideoSize for the abstraction
    options.VideoSize = resolution switch
    {
        "540p" => new System.Drawing.Size(960, 540),
        "1080p" => new System.Drawing.Size(1920, 1080),
        "4k" => new System.Drawing.Size(3840, 2160),
        _ => new System.Drawing.Size(1280, 720),
    };

    return options;
}

// Waits for the operation to finish, then prints and optionally saves its contents.
static async Task CompleteAndSaveAsync(VideoGenerationOperation operation, VideoGenerationOptions options, string? outputPath)
{
    Console.WriteLine($"OPERATION_ID: {operation.OperationId}");
    Console.WriteLine($"  Status: {operation.Status}");

    var sw = System.Diagnostics.Stopwatch.StartNew();
    await operation.WaitForCompletionAsync(
        new Progress<VideoGenerationProgress>(p =>
            Console.WriteLine($"  Progress: {p.Status}{(p.PercentComplete.HasValue ? $" ({p.PercentComplete}%)" : "")}")));

    sw.Stop();
    Console.WriteLine($"  Completed in {sw.Elapsed.TotalSeconds:F1}s");

    var contents = await operation.GetContentsAsync(options);
    Console.WriteLine($"  {contents.Count} content item(s)");

    for (int i = 0; i < contents.Count; i++)
    {
        switch (contents[i])
        {
            case DataContent dc when outputPath is not null:
                // Path.GetDirectoryName returns "" (not null) for a bare file name, and
                // Directory.CreateDirectory("") throws — only create a non-empty directory.
                string? dir = Path.GetDirectoryName(outputPath);
                if (!string.IsNullOrEmpty(dir))
                {
                    Directory.CreateDirectory(dir);
                }

                await dc.SaveToAsync(outputPath);
                Console.WriteLine($"  [{i}] Saved: {outputPath} ({dc.Data.Length} bytes)");
                break;
            case DataContent dc:
                Console.WriteLine($"  [{i}] DataContent: {dc.Data.Length} bytes ({dc.MediaType})");
                break;
            case UriContent uc:
                Console.WriteLine($"  [{i}] URI: {uc.Uri}");
                break;
            default:
                Console.WriteLine($"  [{i}] {contents[i].GetType().Name}");
                break;
        }
    }
}

// ---------------------------------------------------------------------------
// File: samples/VideoProviders/LumaAI/README.md
// ---------------------------------------------------------------------------
# Luma AI (Dream Machine) Video Generation Sample

This sample demonstrates using the **Microsoft.Extensions.AI** `IVideoGenerator` abstraction with Luma AI's Dream Machine API (Ray 2 models).

## Getting Access

1. Go to [https://lumalabs.ai/dream-machine/api/keys](https://lumalabs.ai/dream-machine/api/keys)
2. Sign in or create a Luma account
3. Create an API key
4.
Check billing at [https://lumalabs.ai/dream-machine/api/billing/overview](https://lumalabs.ai/dream-machine/api/billing/overview) + +## Environment Setup + +```bash +export LUMA_API_KEY="luma-xxxx" +``` + +## Models + +| Model | ID | Notes | +|---|---|---| +| Ray 2 | `ray-2` | Full quality, supports 540p–4k | +| Ray 2 Flash | `ray-flash-2` | Faster, lower cost | + +## Supported Operations + +| Operation | MEAI Mapping | Notes | +|---|---|---| +| Text-to-video | `VideoOperationKind.Create`, no `OriginalMedia` | Basic prompt → video | +| Image-to-video (start frame) | `VideoOperationKind.Create` + `OriginalMedia` (1 image) | Image as first frame (`keyframes.frame0`) | +| Image-to-video (start+end frames) | `VideoOperationKind.Create` + `OriginalMedia` (2 images) | Two images as keyframes (`frame0`+`frame1`) for interpolation | +| Extend video | `VideoOperationKind.Extend` + `SourceVideoId` | Extend using the generation ID of a completed video | +| Reverse extend | `AdditionalProperties` | Extend backwards — requires provider-specific keyframe manipulation | +| Video interpolation | `AdditionalProperties` | Interpolate between two generation IDs | + +## Usage + +```bash +# Text-to-video +dotnet run -- generate "A tiger walking through snow" --output tiger.mp4 + +# Image-to-video with start frame +dotnet run -- generate "The scene comes alive" --image start.jpg --output scene.mp4 + +# Start + end frame interpolation +dotnet run -- generate "Smooth transition" --image start.jpg --end-image end.jpg + +# Extend a completed video +dotnet run -- extend "The tiger starts running" --video --output extended.mp4 + +# With options +dotnet run -- generate "A neon cityscape" --model ray-2 --resolution 1080p --aspect-ratio 16:9 --loop --duration 5s +``` + +## API Gaps / Limitations + +- **Image URLs only**: Luma requires HTTPS URLs for `promptImage`, not data URIs. The sample sends data URIs but the API may reject them — callers may need to pre-upload to a CDN. 
+- **No direct edit**: There is no video editing endpoint; `VideoOperationKind.Edit` is mapped to keyframe continuation which is not true editing. +- **Reverse extend**: Requires setting `SourceVideoId` as `frame1` (not `frame0`). This requires provider-specific handling not captured by the current abstraction. +- **Concepts/camera motion**: Luma supports "concepts" (e.g., `dolly_zoom`) and camera motion keywords in prompts. These are prompt-level, no dedicated API field. +- **Callback URL**: Luma supports `callback_url` for push-based status updates — not part of the MEAI polling model. +- **Modify Video**: Luma has a separate `/modify-video` endpoint for video editing (not modeled here). +- **Reframe**: Luma supports video/image reframing to different aspect ratios — a unique feature. +- **Add Audio**: Luma has a separate endpoint to add audio to a completed generation. diff --git a/samples/VideoProviders/MultiProviderPOC/MultiProviderPOC.csproj b/samples/VideoProviders/MultiProviderPOC/MultiProviderPOC.csproj new file mode 100644 index 00000000000..d5d23595d8e --- /dev/null +++ b/samples/VideoProviders/MultiProviderPOC/MultiProviderPOC.csproj @@ -0,0 +1,18 @@ + + + + Exe + net10.0 + enable + enable + $(NoWarn);MEAI001;OPENAI001 + + + + + + + + + + diff --git a/samples/VideoProviders/MultiProviderPOC/Program.cs b/samples/VideoProviders/MultiProviderPOC/Program.cs new file mode 100644 index 00000000000..8614a9b169d --- /dev/null +++ b/samples/VideoProviders/MultiProviderPOC/Program.cs @@ -0,0 +1,758 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +// Multi-Provider Video Generation POC +// +// Demonstrates using IVideoGenerator with multiple providers through a unified CLI. +// Supports: OpenAI (Sora), Google Veo, Runway, and Luma AI. 
//
// Usage:
//   dotnet run -- generate --provider openai "A cat playing piano"
//   dotnet run -- generate --provider veo "A cinematic drone shot" --audio --aspect-ratio 16:9
//   dotnet run -- generate --provider runway "A bunny hopping" --seed 42
//   dotnet run -- generate --provider luma "A tiger in snow" --aspect-ratio 16:9
//   dotnet run -- image-to-video --provider openai "The scene comes alive" --image photo.jpg
//   dotnet run -- edit --provider openai "Change sky to sunset" --video <video-id>
//   dotnet run -- extend --provider openai "Continue the scene" --video <video-id>
//
// Environment variables (set the ones for the providers you plan to use):
//   OPENAI_API_KEY — OpenAI API key
//   GOOGLE_API_KEY — Google Gemini API key
//   RUNWAY_API_KEY — Runway API key
//   LUMA_API_KEY   — Luma AI API key

using System.CommandLine;
using System.Drawing;
using System.Net.Http.Headers;
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Logging;
using OpenAI;

// ── Shared options ─────────────────────────────────────────────────────────
var providerOption = new Option<VideoProvider>("--provider", () => VideoProvider.OpenAI, "Video generation provider.");
var modelOption = new Option<string?>("--model", "Model ID (provider-specific). Uses provider default if omitted.");
var outputOption = new Option<string?>("--output", "Output file path (.mp4).");
var durationOption = new Option<int?>("--duration", "Duration in seconds.");
var aspectRatioOption = new Option<string?>("--aspect-ratio", "Aspect ratio (e.g. 16:9, 9:16, 1:1).");
var seedOption = new Option<int?>("--seed", "Seed for reproducible generation.");
var audioOption = new Option<bool>("--audio", () => false, "Generate audio (Veo 3+ only).");
var negativePromptOption = new Option<string?>("--negative-prompt", "What to avoid in the generated video.");
var widthOption = new Option<int?>("--width", "Video width in pixels.");
var heightOption = new Option<int?>("--height", "Video height in pixels.");
var formatOption = new Option<string>("--format", () => "data", "Response format: data or uri.");

// ── generate (text-to-video) ───────────────────────────────────────────────
var generatePromptArg = new Argument<string>("prompt", "Text prompt describing the video.");
var generateCommand = new Command("generate", "Generate a video from a text prompt.")
{
    generatePromptArg, providerOption, modelOption, outputOption,
    durationOption, aspectRatioOption, seedOption, audioOption,
    negativePromptOption, widthOption, heightOption, formatOption,
};

generateCommand.SetHandler(async (context) =>
{
    string prompt = context.ParseResult.GetValueForArgument(generatePromptArg);
    var provider = context.ParseResult.GetValueForOption(providerOption);
    string? model = context.ParseResult.GetValueForOption(modelOption);
    string? outputPath = context.ParseResult.GetValueForOption(outputOption);
    var opts = ParseSharedOptions(context, model);

    using var generator = CreateGenerator(provider, model);
    var request = new VideoGenerationRequest(prompt);
    ApplyNegativePrompt(request, context.ParseResult.GetValueForOption(negativePromptOption));

    var operation = await generator.GenerateAsync(request, opts);
    await CompleteAndSaveAsync(operation, opts, outputPath);
});

// ── image-to-video ─────────────────────────────────────────────────────────
var i2vPromptArg = new Argument<string>("prompt", "Text prompt.");
var imageOption = new Option<string>("--image", "Input image file (path or URL).") { IsRequired = true };

var i2vCommand = new Command("image-to-video", "Generate video from an image + text prompt.")
{
    i2vPromptArg, imageOption, providerOption, modelOption, outputOption,
    durationOption, aspectRatioOption, seedOption, audioOption,
    negativePromptOption, widthOption, heightOption, formatOption,
};

i2vCommand.SetHandler(async (context) =>
{
    string prompt = context.ParseResult.GetValueForArgument(i2vPromptArg);
    string imagePath = context.ParseResult.GetValueForOption(imageOption)!;
    var provider = context.ParseResult.GetValueForOption(providerOption);
    string? model = context.ParseResult.GetValueForOption(modelOption);
    string? outputPath = context.ParseResult.GetValueForOption(outputOption);
    var opts = ParseSharedOptions(context, model);

    // URLs are passed through as UriContent; local files are loaded into DataContent.
    List<AIContent> media;
    if (imagePath.StartsWith("http", StringComparison.OrdinalIgnoreCase))
    {
        media = [new UriContent(new Uri(imagePath), "image/jpeg")];
    }
    else
    {
        media = [await DataContent.LoadFromAsync(imagePath)];
    }

    using var generator = CreateGenerator(provider, model);
    var request = new VideoGenerationRequest(prompt, media);
    ApplyNegativePrompt(request, context.ParseResult.GetValueForOption(negativePromptOption));

    var operation = await generator.GenerateAsync(request, opts);
    await CompleteAndSaveAsync(operation, opts, outputPath);
});

// ── edit ───────────────────────────────────────────────────────────────────
var editPromptArg = new Argument<string>("prompt", "Prompt describing the edit.");
var editVideoOption = new Option<string>("--video", "Video ID to edit.") { IsRequired = true };

// NOTE(review): --video is IsRequired, so the "--input (if no --video ID)" path can
// never be taken on its own — confirm whether --video should be optional here.
var editInputOption = new Option<string?>("--input", "Video file to upload for editing (if no --video ID).");

var editCommand = new Command("edit", "Edit an existing video.")
{
    editPromptArg, editVideoOption, editInputOption, providerOption,
    modelOption, outputOption, formatOption,
};

editCommand.SetHandler(async (context) =>
{
    string prompt = context.ParseResult.GetValueForArgument(editPromptArg);
    string videoId = context.ParseResult.GetValueForOption(editVideoOption)!;
    string? inputPath = context.ParseResult.GetValueForOption(editInputOption);
    var provider = context.ParseResult.GetValueForOption(providerOption);
    string? model = context.ParseResult.GetValueForOption(modelOption);
    string? outputPath = context.ParseResult.GetValueForOption(outputOption);
    string format = context.ParseResult.GetValueForOption(formatOption)!;

    using var generator = CreateGenerator(provider, model);
    var opts = new VideoGenerationOptions
    {
        ModelId = model,
        ResponseFormat = ParseFormat(format),
    };

    List<AIContent>? media = null;
    if (inputPath is not null)
    {
        media = [await DataContent.LoadFromAsync(inputPath)];
    }

    var request = new VideoGenerationRequest(prompt, media)
    {
        OperationKind = VideoOperationKind.Edit,
        SourceVideoId = videoId,
    };

    var operation = await generator.GenerateAsync(request, opts);
    await CompleteAndSaveAsync(operation, opts, outputPath);
});

// ── extend ─────────────────────────────────────────────────────────────────
var extendPromptArg = new Argument<string>("prompt", "Prompt for extending the video.");
var extendVideoOption = new Option<string>("--video", "Video ID to extend.") { IsRequired = true };

var extendCommand = new Command("extend", "Extend a completed video.")
{
    extendPromptArg, extendVideoOption, providerOption,
    modelOption, outputOption, durationOption, formatOption,
};

extendCommand.SetHandler(async (context) =>
{
    string prompt = context.ParseResult.GetValueForArgument(extendPromptArg);
    string videoId = context.ParseResult.GetValueForOption(extendVideoOption)!;
    var provider = context.ParseResult.GetValueForOption(providerOption);
    string? model = context.ParseResult.GetValueForOption(modelOption);
    string? outputPath = context.ParseResult.GetValueForOption(outputOption);
    int? duration = context.ParseResult.GetValueForOption(durationOption);
    string format = context.ParseResult.GetValueForOption(formatOption)!;

    using var generator = CreateGenerator(provider, model);
    var opts = new VideoGenerationOptions
    {
        ModelId = model,
        ResponseFormat = ParseFormat(format),
    };

    if (duration.HasValue)
    {
        opts.Duration = TimeSpan.FromSeconds(duration.Value);
    }

    var request = new VideoGenerationRequest(prompt)
    {
        OperationKind = VideoOperationKind.Extend,
        SourceVideoId = videoId,
    };

    var operation = await generator.GenerateAsync(request, opts);
    await CompleteAndSaveAsync(operation, opts, outputPath);
});

// ── Root ───────────────────────────────────────────────────────────────────
var rootCommand = new RootCommand("Multi-provider video generation POC using MEAI IVideoGenerator.\nSupports: OpenAI (Sora), Google Veo, Runway, Luma AI.")
{
    generateCommand,
    i2vCommand,
    editCommand,
    extendCommand,
};

return await rootCommand.InvokeAsync(args);

// ═══════════════════════════════════════════════════════════════════════════
// Provider factory
// ═══════════════════════════════════════════════════════════════════════════
static IVideoGenerator CreateGenerator(VideoProvider provider, string? model) => provider switch
{
    VideoProvider.OpenAI => CreateOpenAI(model),
    VideoProvider.Veo => CreateGoogleVeo(model),
    VideoProvider.Runway => CreateRunway(model),
    VideoProvider.Luma => CreateLuma(model),
    _ => throw new ArgumentException($"Unknown provider: {provider}"),
};

static IVideoGenerator CreateOpenAI(string? model)
{
    string apiKey = RequireEnvVar("OPENAI_API_KEY", "https://platform.openai.com/api-keys");
    var loggerFactory = LoggerFactory.Create(b => b.AddConsole().SetMinimumLevel(LogLevel.Debug));
    return new OpenAIClient(apiKey)
        .GetVideoClient()
        .AsIVideoGenerator(model ?? "sora-2")
        .AsBuilder()
        .UseLogging(loggerFactory)
        .UseOpenTelemetry(loggerFactory)
        .Build();
}

static IVideoGenerator CreateGoogleVeo(string? model)
{
    string apiKey = RequireEnvVar("GOOGLE_API_KEY", "https://aistudio.google.com/apikey");
    return new GoogleVeoVideoGenerator(apiKey, model ?? "veo-3.1-generate-preview");
}

static IVideoGenerator CreateRunway(string? model)
{
    string apiKey = RequireEnvVar("RUNWAY_API_KEY", "https://dev.runwayml.com/");
    return new RunwayVideoGenerator(apiKey, model ?? "gen4_turbo");
}

static IVideoGenerator CreateLuma(string? model)
{
    string apiKey = RequireEnvVar("LUMA_API_KEY", "https://lumalabs.ai/dream-machine/api/keys");
    return new LumaVideoGenerator(apiKey, model ?? "ray-2");
}

// Returns the named environment variable, or exits the process with guidance.
static string RequireEnvVar(string name, string url)
{
    string? value = Environment.GetEnvironmentVariable(name);
    if (string.IsNullOrEmpty(value))
    {
        Console.Error.WriteLine($"Error: Set the {name} environment variable.");
        Console.Error.WriteLine($"Get a key at {url}");
        Environment.Exit(1);
    }

    return value!;
}

// ═══════════════════════════════════════════════════════════════════════════
// Helpers
// ═══════════════════════════════════════════════════════════════════════════

// Non-static: captures the shared Option instances declared above.
VideoGenerationOptions ParseSharedOptions(System.CommandLine.Invocation.InvocationContext context, string? model)
{
    int? duration = context.ParseResult.GetValueForOption(durationOption);
    string? aspectRatio = context.ParseResult.GetValueForOption(aspectRatioOption);
    int? seed = context.ParseResult.GetValueForOption(seedOption);
    bool audio = context.ParseResult.GetValueForOption(audioOption);
    int? width = context.ParseResult.GetValueForOption(widthOption);
    int? height = context.ParseResult.GetValueForOption(heightOption);
    string format = context.ParseResult.GetValueForOption(formatOption)!;

    var opts = new VideoGenerationOptions
    {
        ModelId = model,
        ResponseFormat = ParseFormat(format),
    };

    if (duration.HasValue)
    {
        opts.Duration = TimeSpan.FromSeconds(duration.Value);
    }

    if (aspectRatio is not null)
    {
        opts.AspectRatio = aspectRatio;
    }

    if (seed.HasValue)
    {
        opts.Seed = seed.Value;
    }

    if (audio)
    {
        opts.GenerateAudio = true;
    }

    if (width.HasValue && height.HasValue)
    {
        opts.VideoSize = new Size(width.Value, height.Value);
    }

    return opts;
}

static void ApplyNegativePrompt(VideoGenerationRequest request, string? negativePrompt)
{
    if (negativePrompt is not null)
    {
        request.NegativePrompt = negativePrompt;
    }
}

static VideoGenerationResponseFormat ParseFormat(string format)
    => string.Equals(format, "uri", StringComparison.OrdinalIgnoreCase)
        ? VideoGenerationResponseFormat.Uri
        : VideoGenerationResponseFormat.Data;

// Waits for completion, then prints and optionally saves each content item.
static async Task CompleteAndSaveAsync(VideoGenerationOperation operation, VideoGenerationOptions options, string? outputPath)
{
    Console.WriteLine($"OPERATION_ID: {operation.OperationId}");
    Console.WriteLine($"  Provider: {operation.GetType().Name}");
    Console.WriteLine($"  Status: {operation.Status}");

    var sw = System.Diagnostics.Stopwatch.StartNew();
    await operation.WaitForCompletionAsync(
        new Progress<VideoGenerationProgress>(p =>
            Console.WriteLine($"  Progress: {p.Status}{(p.PercentComplete.HasValue ? $" ({p.PercentComplete}%)" : string.Empty)}")));

    sw.Stop();
    Console.WriteLine($"  Completed in {sw.Elapsed.TotalSeconds:F1}s");

    var contents = await operation.GetContentsAsync(options);
    Console.WriteLine($"  {contents.Count} content item(s)");

    for (int i = 0; i < contents.Count; i++)
    {
        // Only compute a save path when one was requested; previously this used
        // `outputPath!` even when outputPath was null. Suffix the index for
        // multi-video results so items don't overwrite each other.
        string? savePath = outputPath;
        if (outputPath is not null && contents.Count > 1)
        {
            savePath = Path.Combine(
                Path.GetDirectoryName(outputPath) ?? ".",
                $"{Path.GetFileNameWithoutExtension(outputPath)}_{i}{Path.GetExtension(outputPath)}");
        }

        switch (contents[i])
        {
            case DataContent dc when savePath is not null:
                // Path.GetDirectoryName returns "" (not null) for a bare file name, and
                // Directory.CreateDirectory("") throws — only create a non-empty directory.
                string? dir = Path.GetDirectoryName(savePath);
                if (!string.IsNullOrEmpty(dir))
                {
                    Directory.CreateDirectory(dir);
                }

                await dc.SaveToAsync(savePath);
                Console.WriteLine($"  [{i}] Saved: {savePath} ({dc.Data.Length} bytes, {dc.MediaType})");
                break;
            case DataContent dc:
                Console.WriteLine($"  [{i}] DataContent: {dc.Data.Length} bytes ({dc.MediaType})");
                break;
            case UriContent uc:
                Console.WriteLine($"  [{i}] URI: {uc.Uri} ({uc.MediaType})");
                break;
            default:
                Console.WriteLine($"  [{i}] {contents[i].GetType().Name}");
                break;
        }
    }
}

// ═══════════════════════════════════════════════════════════════════════════
// Types
// ═══════════════════════════════════════════════════════════════════════════
enum VideoProvider { OpenAI, Veo, Runway, Luma }

// ═══════════════════════════════════════════════════════════════════════════
// Inline provider implementations
// (In production, these would be separate NuGet packages or project references)
// ═══════════════════════════════════════════════════════════════════════════

// ─── Google Veo ────────────────────────────────────────────────────────────
internal sealed class GoogleVeoVideoGenerator : IVideoGenerator
{
    private const string BaseUrl = "https://generativelanguage.googleapis.com/v1beta";
    private readonly HttpClient _httpClient;
    private readonly string _apiKey;
    private readonly string _modelId;

    public GoogleVeoVideoGenerator(string apiKey, string modelId, HttpClient? httpClient = null)
    {
        _apiKey = apiKey;
        _modelId = modelId;
        _httpClient = httpClient ?? new HttpClient();
        _httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
    }

    /// <summary>Starts a Veo long-running generation via models/{model}:generateVideos.</summary>
    public async Task<VideoGenerationOperation> GenerateAsync(
        VideoGenerationRequest request, VideoGenerationOptions?
options = null, CancellationToken cancellationToken = default) + { + string model = options?.ModelId ?? _modelId; + var body = new JsonObject(); + if (request.Prompt is not null) body["prompt"] = request.Prompt; + + if (request.OperationKind == VideoOperationKind.Create && request.OriginalMedia is not null) + { + foreach (var item in request.OriginalMedia) + { + if (item is DataContent dc && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? false) && dc.Data.Length > 0) + { + body["image"] = new JsonObject { ["imageBytes"] = Convert.ToBase64String(dc.Data.ToArray()), ["mimeType"] = dc.MediaType }; + break; + } + + if (item is UriContent uc) + { + body["image"] = new JsonObject { ["imageUri"] = uc.Uri.ToString() }; + break; + } + } + } + + var config = new JsonObject(); + if (options?.Duration is { } dur) config["durationSeconds"] = ((int)dur.TotalSeconds).ToString(); + if (options?.AspectRatio is { } ar) config["aspectRatio"] = ar; + if (options?.Count is { } cnt) config["numberOfVideos"] = cnt; + if (options?.Seed is int seed) config["seed"] = seed; + if (options?.GenerateAudio == true) config["generateAudio"] = true; + if (request.NegativePrompt is { } neg) config["negativePrompt"] = neg; + if (options?.AdditionalProperties?.TryGetValue("personGeneration", out object? 
pg) == true && pg is string pgs) config["personGeneration"] = pgs; + if (config.Count > 0) body["generationConfig"] = config; + + string url = $"{BaseUrl}/models/{model}:generateVideos?key={_apiKey}"; + using var content = new StringContent(body.ToJsonString(), System.Text.Encoding.UTF8, "application/json"); + using var response = await _httpClient.PostAsync(url, content, cancellationToken); + string responseBody = await response.Content.ReadAsStringAsync(cancellationToken); + response.EnsureSuccessStatusCode(); + var result = JsonDocument.Parse(responseBody); + string opName = result.RootElement.GetProperty("name").GetString()!; + return new GoogleVeoVideoGenerationOperation(opName, _apiKey, _httpClient, model); + } + + public object? GetService(Type serviceType, object? serviceKey = null) + => serviceKey is null && serviceType.IsInstanceOfType(this) ? this : null; + + public void Dispose() => _httpClient.Dispose(); +} + +internal sealed class GoogleVeoVideoGenerationOperation : VideoGenerationOperation +{ + private const string BaseUrl = "https://generativelanguage.googleapis.com/v1beta"; + private readonly HttpClient _httpClient; + private readonly string _apiKey; + private bool _done; + private string? _status; + private string? _failureReason; + private readonly List _videoUris = []; + + public GoogleVeoVideoGenerationOperation(string opName, string apiKey, HttpClient httpClient, string modelId) + { + OperationId = opName; ModelId = modelId; _apiKey = apiKey; _httpClient = httpClient; _status = "PROCESSING"; + } + + public override string? OperationId { get; } + public override string? Status => _status; + public override int? PercentComplete => _done ? 100 : null; + public override bool IsCompleted => _done; + public override string? 
FailureReason => _failureReason; + + public override async Task UpdateAsync(CancellationToken cancellationToken = default) + { + using var resp = await _httpClient.GetAsync($"{BaseUrl}/{OperationId}?key={_apiKey}", cancellationToken); + string body = await resp.Content.ReadAsStringAsync(cancellationToken); + resp.EnsureSuccessStatusCode(); + using var doc = JsonDocument.Parse(body); + var root = doc.RootElement; + _done = root.TryGetProperty("done", out var d) && d.GetBoolean(); + if (_done) _status = "COMPLETED"; + if (root.TryGetProperty("error", out var err)) { _failureReason = err.ToString(); _status = "FAILED"; _done = true; } + _videoUris.Clear(); + if (root.TryGetProperty("response", out var response) && response.TryGetProperty("generatedVideos", out var vids)) + foreach (var v in vids.EnumerateArray()) + if (v.TryGetProperty("video", out var video) && video.TryGetProperty("uri", out var uri)) + _videoUris.Add(uri.GetString()!); + } + + public override async Task WaitForCompletionAsync(IProgress? progress = null, CancellationToken cancellationToken = default) + { + while (!IsCompleted) + { + await Task.Delay(TimeSpan.FromSeconds(10), cancellationToken); + await UpdateAsync(cancellationToken); + progress?.Report(new VideoGenerationProgress(_status, PercentComplete)); + } + + if (_status == "FAILED") throw new InvalidOperationException($"Video generation failed: {_failureReason}"); + } + + public override async Task> GetContentsAsync(VideoGenerationOptions? 
options = null, CancellationToken cancellationToken = default) + { + if (!IsCompleted) throw new InvalidOperationException("Not completed."); + if (_videoUris.Count == 0) await UpdateAsync(cancellationToken); + var results = new List(); + foreach (var uri in _videoUris) + { + if (options?.ResponseFormat == VideoGenerationResponseFormat.Uri) { results.Add(new UriContent(new Uri(uri), "video/mp4")); continue; } + using var r = await _httpClient.GetAsync(uri, cancellationToken); r.EnsureSuccessStatusCode(); + results.Add(new DataContent(await r.Content.ReadAsByteArrayAsync(cancellationToken), "video/mp4")); + } + + return results; + } +} + +// ─── Runway ──────────────────────────────────────────────────────────────── +internal sealed class RunwayVideoGenerator : IVideoGenerator +{ + private const string BaseUrl = "https://api.dev.runwayml.com"; + private readonly HttpClient _httpClient; + private readonly string _modelId; + + public RunwayVideoGenerator(string apiKey, string modelId, HttpClient? httpClient = null) + { + _modelId = modelId; + _httpClient = httpClient ?? new HttpClient(); + _httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", apiKey); + _httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); + _httpClient.DefaultRequestHeaders.Add("X-Runway-Version", "2024-11-06"); + } + + public async Task GenerateAsync( + VideoGenerationRequest request, VideoGenerationOptions? options = null, CancellationToken cancellationToken = default) + { + string model = options?.ModelId ?? 
_modelId; + string endpoint; + JsonObject body; + + bool hasVideo = request.OriginalMedia?.Any(m => m is DataContent dc && dc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) == true) == true; + bool hasImage = request.OriginalMedia?.Any(m => m is DataContent dc && dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) == true) == true; + + if (request.OperationKind == VideoOperationKind.Edit && hasVideo) + { + endpoint = "/v1/video_to_video"; + body = new JsonObject { ["model"] = "gen4_aleph", ["promptText"] = request.Prompt ?? "" }; + string? vidUri = GetMediaUri(request.OriginalMedia, "video/"); + if (vidUri is not null) body["videoUri"] = vidUri; + } + else if (hasImage) + { + endpoint = "/v1/image_to_video"; + string? imgUri = GetMediaUri(request.OriginalMedia, "image/"); + string ratio = options?.AspectRatio is { } ar ? MapAspectRatio(ar) : "1280:720"; + body = new JsonObject { ["model"] = model, ["promptText"] = request.Prompt ?? "", ["promptImage"] = imgUri ?? "", ["ratio"] = ratio }; + } + else + { + endpoint = "/v1/text_to_video"; + string ratio = options?.AspectRatio is { } ar ? MapAspectRatio(ar) : "1280:720"; + body = new JsonObject { ["model"] = model, ["promptText"] = request.Prompt ?? "", ["ratio"] = ratio }; + } + + if (options?.Duration is { } dur) body["duration"] = (int)dur.TotalSeconds; + if (options?.Seed is int seed) body["seed"] = seed; + + using var content = new StringContent(body.ToJsonString(), System.Text.Encoding.UTF8, "application/json"); + using var resp = await _httpClient.PostAsync($"{BaseUrl}{endpoint}", content, cancellationToken); + string responseBody = await resp.Content.ReadAsStringAsync(cancellationToken); + resp.EnsureSuccessStatusCode(); + string taskId = JsonDocument.Parse(responseBody).RootElement.GetProperty("id").GetString()!; + return new RunwayVideoGenerationOperation(taskId, _httpClient, model); + } + + public object? GetService(Type serviceType, object? 
serviceKey = null) + => serviceKey is null && serviceType.IsInstanceOfType(this) ? this : null; + public void Dispose() => _httpClient.Dispose(); + + private static string? GetMediaUri(IEnumerable? media, string prefix) + { + if (media is null) return null; + foreach (var item in media) + { + if (item is UriContent uc) return uc.Uri.ToString(); + if (item is DataContent dc && dc.Data.Length > 0) + return dc.Uri ?? $"data:{dc.MediaType ?? "application/octet-stream"};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; + } + + return null; + } + + private static string MapAspectRatio(string ar) => ar switch + { + "16:9" => "1280:720", "9:16" => "720:1280", "1:1" => "960:960", + "4:3" => "1104:832", "3:4" => "832:1104", _ => ar, + }; +} + +internal sealed class RunwayVideoGenerationOperation : VideoGenerationOperation +{ + private const string BaseUrl = "https://api.dev.runwayml.com"; + private readonly HttpClient _httpClient; + private string? _status; + private string? _failureReason; + private string? _outputUrl; + + public RunwayVideoGenerationOperation(string taskId, HttpClient httpClient, string modelId) + { + OperationId = taskId; ModelId = modelId; _httpClient = httpClient; _status = "PENDING"; + } + + public override string? OperationId { get; } + public override string? Status => _status; + public override int? PercentComplete => _status switch { "SUCCEEDED" => 100, "RUNNING" => 50, "THROTTLED" => 10, _ => 0 }; + public override bool IsCompleted => _status is "SUCCEEDED" or "FAILED"; + public override string? 
FailureReason => _failureReason; + + public override async Task UpdateAsync(CancellationToken cancellationToken = default) + { + using var resp = await _httpClient.GetAsync($"{BaseUrl}/v1/tasks/{OperationId}", cancellationToken); + string body = await resp.Content.ReadAsStringAsync(cancellationToken); + resp.EnsureSuccessStatusCode(); + using var doc = JsonDocument.Parse(body); + var root = doc.RootElement; + _status = root.GetProperty("status").GetString(); + if (root.TryGetProperty("failure", out var f) && f.ValueKind == JsonValueKind.String) _failureReason = f.GetString(); + if (root.TryGetProperty("output", out var o) && o.ValueKind == JsonValueKind.Array && o.GetArrayLength() > 0) _outputUrl = o[0].GetString(); + else if (root.TryGetProperty("output", out var o2) && o2.ValueKind == JsonValueKind.String) _outputUrl = o2.GetString(); + } + + public override async Task WaitForCompletionAsync(IProgress? progress = null, CancellationToken cancellationToken = default) + { + while (!IsCompleted) + { + await Task.Delay(TimeSpan.FromSeconds(5), cancellationToken); + await UpdateAsync(cancellationToken); + progress?.Report(new VideoGenerationProgress(_status, PercentComplete)); + } + + if (_status == "FAILED") throw new InvalidOperationException($"Task failed: {_failureReason}"); + } + + public override async Task> GetContentsAsync(VideoGenerationOptions? 
options = null, CancellationToken cancellationToken = default) + { + if (!IsCompleted) throw new InvalidOperationException("Not completed."); + if (_outputUrl is null) await UpdateAsync(cancellationToken); + if (_outputUrl is null) throw new InvalidOperationException("No output URL."); + if (options?.ResponseFormat == VideoGenerationResponseFormat.Uri) return [new UriContent(new Uri(_outputUrl), "video/mp4")]; + using var r = await _httpClient.GetAsync(_outputUrl, cancellationToken); r.EnsureSuccessStatusCode(); + return [new DataContent(await r.Content.ReadAsByteArrayAsync(cancellationToken), "video/mp4")]; + } +} + +// ─── Luma AI ─────────────────────────────────────────────────────────────── +internal sealed class LumaVideoGenerator : IVideoGenerator +{ + private const string BaseUrl = "https://api.lumalabs.ai/dream-machine/v1"; + private readonly HttpClient _httpClient; + private readonly string _modelId; + + public LumaVideoGenerator(string apiKey, string modelId, HttpClient? httpClient = null) + { + _modelId = modelId; + _httpClient = httpClient ?? new HttpClient(); + _httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", apiKey); + _httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); + } + + public async Task GenerateAsync( + VideoGenerationRequest request, VideoGenerationOptions? options = null, CancellationToken cancellationToken = default) + { + string model = options?.ModelId ?? _modelId; + var body = new JsonObject { ["prompt"] = request.Prompt, ["model"] = model }; + if (options?.Duration is { } dur) body["duration"] = $"{(int)dur.TotalSeconds}s"; + if (options?.AspectRatio is { } ar) body["aspect_ratio"] = ar; + + var keyframes = new JsonObject(); + if (request.OperationKind == VideoOperationKind.Create && request.OriginalMedia is not null) + { + int idx = 0; + foreach (var item in request.OriginalMedia) + { + string key = idx == 0 ? 
"frame0" : "frame1"; + if (item is UriContent uc) keyframes[key] = new JsonObject { ["type"] = "image", ["url"] = uc.Uri.ToString() }; + else if (item is DataContent dc && dc.Data.Length > 0) + keyframes[key] = new JsonObject { ["type"] = "image", ["url"] = dc.Uri ?? $"data:{dc.MediaType ?? "image/png"};base64,{Convert.ToBase64String(dc.Data.ToArray())}" }; + if (++idx >= 2) break; + } + } + else if (request.OperationKind == VideoOperationKind.Extend && request.SourceVideoId is not null) + { + keyframes["frame0"] = new JsonObject { ["type"] = "generation", ["id"] = request.SourceVideoId }; + } + + if (keyframes.Count > 0) body["keyframes"] = keyframes; + + using var content = new StringContent(body.ToJsonString(), System.Text.Encoding.UTF8, "application/json"); + using var resp = await _httpClient.PostAsync($"{BaseUrl}/generations", content, cancellationToken); + string responseBody = await resp.Content.ReadAsStringAsync(cancellationToken); + resp.EnsureSuccessStatusCode(); + string opId = JsonDocument.Parse(responseBody).RootElement.GetProperty("id").GetString()!; + return new LumaVideoGenerationOperation(opId, _httpClient, model); + } + + public object? GetService(Type serviceType, object? serviceKey = null) + => serviceKey is null && serviceType.IsInstanceOfType(this) ? this : null; + public void Dispose() => _httpClient.Dispose(); +} + +internal sealed class LumaVideoGenerationOperation : VideoGenerationOperation +{ + private const string BaseUrl = "https://api.lumalabs.ai/dream-machine/v1"; + private readonly HttpClient _httpClient; + private string? _status; + private string? _failureReason; + private string? _videoUrl; + + public LumaVideoGenerationOperation(string opId, HttpClient httpClient, string modelId) + { + OperationId = opId; ModelId = modelId; _httpClient = httpClient; _status = "queued"; + } + + public override string? OperationId { get; } + public override string? Status => _status; + public override int? 
PercentComplete => _status switch { "completed" => 100, "dreaming" => 50, _ => 0 }; + public override bool IsCompleted => _status is "completed" or "failed"; + public override string? FailureReason => _failureReason; + + public override async Task UpdateAsync(CancellationToken cancellationToken = default) + { + using var resp = await _httpClient.GetAsync($"{BaseUrl}/generations/{OperationId}", cancellationToken); + string body = await resp.Content.ReadAsStringAsync(cancellationToken); + resp.EnsureSuccessStatusCode(); + using var doc = JsonDocument.Parse(body); + var root = doc.RootElement; + _status = root.GetProperty("state").GetString(); + if (root.TryGetProperty("failure_reason", out var fr) && fr.ValueKind == JsonValueKind.String) _failureReason = fr.GetString(); + if (root.TryGetProperty("assets", out var assets) && assets.TryGetProperty("video", out var v) && v.ValueKind == JsonValueKind.String) _videoUrl = v.GetString(); + } + + public override async Task WaitForCompletionAsync(IProgress? progress = null, CancellationToken cancellationToken = default) + { + while (!IsCompleted) + { + await Task.Delay(TimeSpan.FromSeconds(5), cancellationToken); + await UpdateAsync(cancellationToken); + progress?.Report(new VideoGenerationProgress(_status, PercentComplete)); + } + + if (_status == "failed") throw new InvalidOperationException($"Generation failed: {_failureReason}"); + } + + public override async Task> GetContentsAsync(VideoGenerationOptions? 
options = null, CancellationToken cancellationToken = default) + { + if (!IsCompleted) throw new InvalidOperationException("Not completed."); + if (_videoUrl is null) await UpdateAsync(cancellationToken); + if (_videoUrl is null) throw new InvalidOperationException("No video URL."); + if (options?.ResponseFormat == VideoGenerationResponseFormat.Uri) return [new UriContent(new Uri(_videoUrl), "video/mp4")]; + using var r = await _httpClient.GetAsync(_videoUrl, cancellationToken); r.EnsureSuccessStatusCode(); + return [new DataContent(await r.Content.ReadAsByteArrayAsync(cancellationToken), "video/mp4")]; + } + + public override VideoGenerationRequest CreateExtensionRequest(string? prompt = null) + => new() { Prompt = prompt, SourceVideoId = OperationId, OperationKind = VideoOperationKind.Extend }; +} diff --git a/samples/VideoProviders/MultiProviderPOC/README.md b/samples/VideoProviders/MultiProviderPOC/README.md new file mode 100644 index 00000000000..3109f34c371 --- /dev/null +++ b/samples/VideoProviders/MultiProviderPOC/README.md @@ -0,0 +1,70 @@ +# Multi-Provider Video Generation POC + +Unified CLI that demonstrates `IVideoGenerator` across four providers — OpenAI (Sora), Google Veo, Runway, and Luma AI — using the same MEAI abstractions with first-class properties. 
+
+## Prerequisites
+
+Set API keys for the providers you want to test:
+
+| Provider | Environment Variable | Get a Key |
+|----------|---------------------|-----------|
+| OpenAI | `OPENAI_API_KEY` | [platform.openai.com/api-keys](https://platform.openai.com/api-keys) |
+| Google Veo | `GOOGLE_API_KEY` | [aistudio.google.com/apikey](https://aistudio.google.com/apikey) |
+| Runway | `RUNWAY_API_KEY` | [dev.runwayml.com](https://dev.runwayml.com/) |
+| Luma AI | `LUMA_API_KEY` | [lumalabs.ai/dream-machine/api/keys](https://lumalabs.ai/dream-machine/api/keys) |
+
+## Quick Start
+
+```bash
+# Text-to-video with OpenAI
+dotnet run -- generate --provider openai "A cat playing piano" --output cat.mp4
+
+# Text-to-video with Google Veo + audio + negative prompt
+dotnet run -- generate --provider veo "Birds on a lake" --audio --negative-prompt "people, cars" --output birds.mp4
+
+# Text-to-video with Runway + seed for reproducibility
+dotnet run -- generate --provider runway "A dancer spinning" --seed 42 --output dancer.mp4
+
+# Text-to-video with Luma + aspect ratio
+dotnet run -- generate --provider luma "Flowers blooming" --aspect-ratio 9:16 --output flowers.mp4
+
+# Image-to-video (any provider)
+dotnet run -- image-to-video --provider openai "The scene comes alive" --image photo.jpg --output scene.mp4
+
+# Edit a video (OpenAI, Runway)
+dotnet run -- edit --provider openai "Change to sunset colors" --video <operation-id> --output edited.mp4
+
+# Extend a video (OpenAI, Luma)
+dotnet run -- extend --provider openai "The scene continues" --video <operation-id> --output extended.mp4
+```
+
+## Demo Script
+
+Run the automated demo that exercises each provider's supported features:
+
+```powershell
+# Auto-detect providers from environment variables
+./demo-multi-provider.ps1
+
+# Run specific providers
+./demo-multi-provider.ps1 -Providers "openai,veo"
+
+# With a reference image for image-to-video tests
+./demo-multi-provider.ps1 -ReferenceImage myimage.png
+
+# Reset state and start fresh 
+./demo-multi-provider.ps1 -Reset +``` + +## Feature Matrix + +| Feature | OpenAI | Google Veo | Runway | Luma AI | +|---------|:------:|:----------:|:------:|:-------:| +| Text-to-video | ✅ | ✅ | ✅ | ✅ | +| Image-to-video | ✅ | ✅ | ✅ | ✅ | +| Video edit | ✅ | ❌ | ✅ | ❌ | +| Video extend | ✅ | ✅ | ❌ | ✅ | +| `AspectRatio` | via Size | ✅ | ✅ | ✅ | +| `Seed` | ❌ | ✅ | ✅ | ❌ | +| `GenerateAudio` | ❌ | ✅ | ❌ | ❌ | +| `NegativePrompt` | ❌ | ✅ | ❌ | ❌ | diff --git a/samples/VideoProviders/MultiProviderPOC/demo-multi-provider.ps1 b/samples/VideoProviders/MultiProviderPOC/demo-multi-provider.ps1 new file mode 100644 index 00000000000..aa033ffa867 --- /dev/null +++ b/samples/VideoProviders/MultiProviderPOC/demo-multi-provider.ps1 @@ -0,0 +1,350 @@ +#!/usr/bin/env pwsh +# Multi-Provider Video Generation Demo +# +# Runs through relevant scenarios for each provider based on feature support. +# Set the environment variables for the providers you want to test. +# +# Environment variables: +# OPENAI_API_KEY — OpenAI Sora +# GOOGLE_API_KEY — Google Veo +# RUNWAY_API_KEY — Runway +# LUMA_API_KEY — Luma AI +# +# Usage: +# ./demo-multi-provider.ps1 # Run all configured providers +# ./demo-multi-provider.ps1 -Providers openai,veo # Run specific providers +# ./demo-multi-provider.ps1 -Reset # Clear state and start fresh + + + +param( + [string]$Providers = "", # Comma-separated: openai,veo,runway,luma (empty = auto-detect) + [string]$OutputDir = "..\..\artifacts\multi-provider-output", + [string]$ReferenceImage = "", # Optional image for image-to-video tests + [switch]$Reset +) + +Set-StrictMode -Version Latest +$ErrorActionPreference = "Stop" + +$ProjectDir = $PSScriptRoot + +# ── State management ──────────────────────────────────────────────────── +New-Item -ItemType Directory -Path $OutputDir -Force | Out-Null +$stateFile = Join-Path $OutputDir "demo-state.json" + +if ($Reset -and (Test-Path $stateFile)) { + Remove-Item $stateFile -Force + Write-Host "State cleared." 
-ForegroundColor Yellow +} + +function Get-State { + if (Test-Path $stateFile) { return Get-Content $stateFile -Raw | ConvertFrom-Json -AsHashtable } + return @{} +} + +function Set-State([string]$Key, [string]$Value) { + $s = Get-State; $s[$Key] = $Value + $s | ConvertTo-Json | Set-Content $stateFile +} + +# ── Detect available providers ────────────────────────────────────────── +$providerMap = @{ + openai = "OPENAI_API_KEY" + veo = "GOOGLE_API_KEY" + runway = "RUNWAY_API_KEY" + luma = "LUMA_API_KEY" +} + +if ($Providers -ne "") { + $activeProviders = $Providers -split "," | ForEach-Object { $_.Trim().ToLower() } +} else { + $activeProviders = @() + foreach ($p in $providerMap.Keys) { + $envVar = $providerMap[$p] + if ([Environment]::GetEnvironmentVariable($envVar)) { + $activeProviders += $p + } + } +} + +if ($activeProviders.Count -eq 0) { + Write-Error "No providers configured. Set at least one API key environment variable." + exit 1 +} + +Write-Host "" +Write-Host ("=" * 70) -ForegroundColor Cyan +Write-Host " Multi-Provider Video Generation Demo" -ForegroundColor Cyan +Write-Host " Active providers: $($activeProviders -join ', ')" -ForegroundColor Cyan +Write-Host ("=" * 70) -ForegroundColor Cyan + +# ── Helpers ───────────────────────────────────────────────────────────── +function Invoke-Tool([string]$Label, [string[]]$Arguments) { + Write-Host "" + Write-Host ("─" * 70) -ForegroundColor Cyan + Write-Host " $Label" -ForegroundColor Cyan + Write-Host ("─" * 70) -ForegroundColor Cyan + Write-Host "> dotnet run --project $ProjectDir -- $($Arguments -join ' ')" -ForegroundColor DarkGray + + $output = & dotnet run --project $ProjectDir -- @Arguments 2>&1 + $output | ForEach-Object { Write-Host " $_" } + if ($LASTEXITCODE -ne 0) { + Write-Warning "Tool exited with code $LASTEXITCODE" + return "" + } + return ($output | Out-String) +} + +function Extract-Id([string]$Output, [string]$Prefix) { + if ($Output -match "$Prefix\:\s*(\S+)") { return $Matches[1] } + 
return "" +} + +function Skip-OrRun([string]$StateKey, [string]$Label, [string]$OutputFile, [scriptblock]$Action) { + $state = Get-State + if ($state.ContainsKey($StateKey) -and $state[$StateKey] -ne "" -and + ((-not $OutputFile) -or (Test-Path $OutputFile))) { + Write-Host "" + Write-Host ("─" * 70) -ForegroundColor DarkGray + Write-Host " SKIP: $Label ($StateKey=$($state[$StateKey]))" -ForegroundColor DarkGray + return $state[$StateKey] + } + $id = & $Action + if ($id -ne "") { Set-State $StateKey $id } + return $id +} + +# ═══════════════════════════════════════════════════════════════════════════ +# Provider-specific scenarios +# ═══════════════════════════════════════════════════════════════════════════ + +# ─── OpenAI (Sora) ───────────────────────────────────────────────────── +function Run-OpenAI { + Write-Host "" + Write-Host ("═" * 70) -ForegroundColor Green + Write-Host " OPENAI (Sora)" -ForegroundColor Green + Write-Host " Features: text-to-video, image-to-video, edit, extend, characters" -ForegroundColor Green + Write-Host ("═" * 70) -ForegroundColor Green + + # 1. Text-to-video + $t2vPath = Join-Path $OutputDir "openai_01_text2video.mp4" + $t2vId = Skip-OrRun "openai_t2v" "OpenAI: Text-to-video" $t2vPath { + $out = Invoke-Tool "OpenAI: Text-to-video (8s, 1280x720)" @( + "generate", "--provider", "openai", + "A smooth tracking shot through a neon-lit cyberpunk city at night. Rain reflects colorful lights on the wet streets.", + "--duration", "8", "--width", "1280", "--height", "720", + "--output", $t2vPath + ) + return (Extract-Id $out "OPERATION_ID") + } + + # 2. 
Image-to-video (if reference image provided) + if ($ReferenceImage -ne "" -and (Test-Path $ReferenceImage)) { + $i2vPath = Join-Path $OutputDir "openai_02_image2video.mp4" + $i2vId = Skip-OrRun "openai_i2v" "OpenAI: Image-to-video" $i2vPath { + $out = Invoke-Tool "OpenAI: Image-to-video from reference" @( + "image-to-video", "--provider", "openai", + "A cinematic slow-motion shot inspired by the image, camera slowly orbiting around the subject.", + "--image", $ReferenceImage, "--duration", "4", + "--output", $i2vPath + ) + return (Extract-Id $out "OPERATION_ID") + } + } + + # 3. Edit (requires previous video) + if ($t2vId -ne "") { + $editPath = Join-Path $OutputDir "openai_03_edit.mp4" + Skip-OrRun "openai_edit" "OpenAI: Edit video" $editPath { + $out = Invoke-Tool "OpenAI: Edit — shift to warm sunset palette" @( + "edit", "--provider", "openai", + "Shift the entire color palette to warm golden sunset tones with soft amber highlights.", + "--video", $t2vId, + "--output", $editPath + ) + return (Extract-Id $out "OPERATION_ID") + } | Out-Null + } + + # 4. Extend (requires previous video) + if ($t2vId -ne "") { + $extPath = Join-Path $OutputDir "openai_04_extend.mp4" + Skip-OrRun "openai_extend" "OpenAI: Extend video" $extPath { + $out = Invoke-Tool "OpenAI: Extend — continue the scene" @( + "extend", "--provider", "openai", + "The camera rises above the buildings to reveal a stunning panoramic view of the cyberpunk skyline.", + "--video", $t2vId, "--duration", "8", + "--output", $extPath + ) + return (Extract-Id $out "OPERATION_ID") + } | Out-Null + } +} + +# ─── Google Veo ──────────────────────────────────────────────────────── +function Run-Veo { + Write-Host "" + Write-Host ("═" * 70) -ForegroundColor Green + Write-Host " GOOGLE VEO" -ForegroundColor Green + Write-Host " Features: text-to-video, image-to-video, audio, negative prompt, seed, aspect ratio" -ForegroundColor Green + Write-Host ("═" * 70) -ForegroundColor Green + + # 1. 
Text-to-video with audio and negative prompt + $t2vPath = Join-Path $OutputDir "veo_01_text2video_audio.mp4" + Skip-OrRun "veo_t2v" "Veo: Text-to-video with audio" $t2vPath { + $out = Invoke-Tool "Veo: Text-to-video + audio + negative prompt" @( + "generate", "--provider", "veo", + "A serene mountain lake at dawn, birds singing, gentle water ripples.", + "--audio", + "--negative-prompt", "people, buildings, cars, text, watermark", + "--aspect-ratio", "16:9", + "--duration", "8", + "--output", $t2vPath + ) + return (Extract-Id $out "OPERATION_ID") + } | Out-Null + + # 2. Text-to-video with seed for reproducibility + $seedPath = Join-Path $OutputDir "veo_02_seeded.mp4" + Skip-OrRun "veo_seed" "Veo: Seeded generation" $seedPath { + $out = Invoke-Tool "Veo: Text-to-video with seed=42" @( + "generate", "--provider", "veo", + "A colorful hot air balloon festival with dozens of balloons taking off at sunrise.", + "--seed", "42", + "--aspect-ratio", "9:16", + "--duration", "6", + "--output", $seedPath + ) + return (Extract-Id $out "OPERATION_ID") + } | Out-Null + + # 3. 
Image-to-video (if reference image provided) + if ($ReferenceImage -ne "" -and (Test-Path $ReferenceImage)) { + $i2vPath = Join-Path $OutputDir "veo_03_image2video.mp4" + Skip-OrRun "veo_i2v" "Veo: Image-to-video" $i2vPath { + $out = Invoke-Tool "Veo: Image-to-video with audio" @( + "image-to-video", "--provider", "veo", + "The scene in the image comes to life with natural movement and ambient sounds.", + "--image", $ReferenceImage, + "--audio", "--duration", "4", + "--output", $i2vPath + ) + return (Extract-Id $out "OPERATION_ID") + } | Out-Null + } +} + +# ─── Runway ──────────────────────────────────────────────────────────── +function Run-Runway { + Write-Host "" + Write-Host ("═" * 70) -ForegroundColor Green + Write-Host " RUNWAY" -ForegroundColor Green + Write-Host " Features: text-to-video, image-to-video, video-to-video, seed" -ForegroundColor Green + Write-Host ("═" * 70) -ForegroundColor Green + + # 1. Text-to-video with seed + $t2vPath = Join-Path $OutputDir "runway_01_text2video.mp4" + Skip-OrRun "runway_t2v" "Runway: Text-to-video" $t2vPath { + $out = Invoke-Tool "Runway: Text-to-video with seed" @( + "generate", "--provider", "runway", + "A graceful ballet dancer performing a spin in an empty theater, dramatic lighting.", + "--seed", "12345", + "--duration", "5", + "--aspect-ratio", "16:9", + "--output", $t2vPath + ) + return (Extract-Id $out "OPERATION_ID") + } | Out-Null + + # 2. 
Image-to-video (if reference image provided) + if ($ReferenceImage -ne "" -and (Test-Path $ReferenceImage)) { + $i2vPath = Join-Path $OutputDir "runway_02_image2video.mp4" + Skip-OrRun "runway_i2v" "Runway: Image-to-video" $i2vPath { + $out = Invoke-Tool "Runway: Image-to-video" @( + "image-to-video", "--provider", "runway", + "The image gradually transforms into a cinematic scene with camera movement.", + "--image", $ReferenceImage, + "--duration", "5", + "--output", $i2vPath + ) + return (Extract-Id $out "OPERATION_ID") + } | Out-Null + } +} + +# ─── Luma AI ─────────────────────────────────────────────────────────── +function Run-Luma { + Write-Host "" + Write-Host ("═" * 70) -ForegroundColor Green + Write-Host " LUMA AI (Dream Machine)" -ForegroundColor Green + Write-Host " Features: text-to-video, image-to-video, extend, aspect ratio, keyframes" -ForegroundColor Green + Write-Host ("═" * 70) -ForegroundColor Green + + # 1. Text-to-video with aspect ratio + $t2vPath = Join-Path $OutputDir "luma_01_text2video.mp4" + $t2vId = Skip-OrRun "luma_t2v" "Luma: Text-to-video" $t2vPath { + $out = Invoke-Tool "Luma: Text-to-video with 9:16 aspect ratio" @( + "generate", "--provider", "luma", + "A time-lapse of flowers blooming in a garden, petals unfurling in sunlight.", + "--aspect-ratio", "9:16", + "--output", $t2vPath + ) + return (Extract-Id $out "OPERATION_ID") + } + + # 2. Image-to-video (if reference image provided) + if ($ReferenceImage -ne "" -and (Test-Path $ReferenceImage)) { + $i2vPath = Join-Path $OutputDir "luma_02_image2video.mp4" + Skip-OrRun "luma_i2v" "Luma: Image-to-video" $i2vPath { + $out = Invoke-Tool "Luma: Image-to-video from keyframe" @( + "image-to-video", "--provider", "luma", + "The image comes to life — subjects begin to move naturally.", + "--image", $ReferenceImage, + "--output", $i2vPath + ) + return (Extract-Id $out "OPERATION_ID") + } | Out-Null + } + + # 3. 
Extend (requires previous video) + if ($t2vId -ne "") { + $extPath = Join-Path $OutputDir "luma_03_extend.mp4" + Skip-OrRun "luma_extend" "Luma: Extend video" $extPath { + $out = Invoke-Tool "Luma: Extend — continue blooming scene" @( + "extend", "--provider", "luma", + "The garden continues to bloom as butterflies arrive and the sun moves across the sky.", + "--video", $t2vId, + "--output", $extPath + ) + return (Extract-Id $out "OPERATION_ID") + } | Out-Null + } +} + +# ═══════════════════════════════════════════════════════════════════════════ +# Run scenarios for each active provider +# ═══════════════════════════════════════════════════════════════════════════ +foreach ($p in $activeProviders) { + switch ($p) { + "openai" { Run-OpenAI } + "veo" { Run-Veo } + "runway" { Run-Runway } + "luma" { Run-Luma } + default { Write-Warning "Unknown provider: $p" } + } +} + +# ── Summary ───────────────────────────────────────────────────────────── +Write-Host "" +Write-Host ("═" * 70) -ForegroundColor Cyan +Write-Host " Demo complete! 
Output files:" -ForegroundColor Cyan +Write-Host ("═" * 70) -ForegroundColor Cyan +if (Test-Path $OutputDir) { + Get-ChildItem $OutputDir -Filter "*.mp4" | ForEach-Object { + Write-Host " $($_.Name) ($([math]::Round($_.Length / 1MB, 1)) MB)" -ForegroundColor Green + } +} +Write-Host "" +Write-Host " State: $stateFile" -ForegroundColor DarkGray +Write-Host " (pass -Reset to start fresh)" -ForegroundColor DarkGray diff --git a/samples/VideoProviders/PROVIDER_COMPARISON.md b/samples/VideoProviders/PROVIDER_COMPARISON.md new file mode 100644 index 00000000000..7deb9668f7b --- /dev/null +++ b/samples/VideoProviders/PROVIDER_COMPARISON.md @@ -0,0 +1,229 @@ +# Video Generation Provider Comparison & MEAI API Gap Analysis + +This document summarizes findings from implementing `IVideoGenerator` across four providers (OpenAI Sora, Google Veo, Runway, and Luma AI), identifies gaps in the current MEAI abstractions, and recommends potential API additions. + +## Provider Feature Matrix + +| Feature | OpenAI (Sora) | Google Veo 3.1 | Runway | Luma AI (Ray 2) | +|---|:---:|:---:|:---:|:---:| +| **Text-to-video** | ✅ | ✅ | ✅ | ✅ | +| **Image-to-video** | ✅ | ✅ | ✅ (i2v endpoint) | ✅ (keyframe) | +| **Video edit** | ✅ | ❌ | ✅ (v2v with gen4_aleph) | ❌¹ | +| **Video extend** | ✅ | ✅ (up to 20×) | ❌ | ✅ (fwd + reverse) | +| **Characters / Avatars** | ✅ (upload video) | ❌ | ✅ (act_two + avatars) | ❌ | +| **Reference images** | ❌ | ✅ (up to 3, typed) | ✅ (v2v references) | ❌ | +| **First+last frame interp** | ❌ | ✅ | ❌ | ✅ (frame0 + frame1) | +| **Native audio** | ❌ | ✅ (Veo 3+) | ❌² | ❌³ | +| **Negative prompt** | ❌ | ✅ | ❌ | ❌ | +| **Seed / reproducibility** | ❌ | ✅ | ✅ | ❌ | +| **Resolution control** | ✅ (WxH pixels) | ✅ (720p/1080p/4k) | ✅ (ratio string) | ✅ (540p–4k) | +| **Aspect ratio** | Implied via Size | ✅ (string) | ✅ (ratio string) | ✅ (string) | +| **Duration** | ✅ (string enum: 4/8/12) | ✅ (string enum: 4/6/8) | ✅ (integer: 2–10) | ✅ (string: "5s") | +| **Multiple outputs** 
| ❌ | ✅ (1–4) | ❌ | ❌ | +| **Looping video** | ❌ | ❌ | ❌ | ✅ | +| **Callback/webhook** | ❌ | ❌ | ❌ | ✅ | +| **Content moderation params** | ❌ | ✅ (personGeneration) | ✅ (publicFigureThreshold) | ❌ | + +¹ Luma has a separate "Modify Video" endpoint not covered in this evaluation. +² Runway provides separate sound effect, TTS, and speech-to-speech endpoints. +³ Luma has a separate "Add Audio" endpoint. + +## Async Polling Patterns + +All four providers use an async task/operation model that maps well to `VideoGenerationOperation`: + +| Provider | Submit | Poll | ID Format | +|---|---|---|---| +| OpenAI | `POST /videos/generations` | `GET /videos/generations/{id}` | `vg_xxxxx` | +| Google Veo | `POST /models/{model}:generateVideos` | `GET /{operation.name}` | `operations/xxx` | +| Runway | `POST /v1/{type}_to_video` | `GET /v1/tasks/{id}` | UUID | +| Luma AI | `POST /dream-machine/v1/generations` | `GET /dream-machine/v1/generations/{id}` | UUID | + +**Assessment**: The `VideoGenerationOperation` pattern (submit → poll → download) is well-suited for all providers. The polling interval varies (5s for Runway/Luma, 10s for Veo). + +## Input Media Handling + +| Provider | Image Input | Video Input | Data URI Support | +|---|---|---|---| +| OpenAI | Data URI in JSON body | Multipart upload | ✅ (images) | +| Google Veo | Base64 bytes in JSON | Gemini Files API | ✅ (inline base64) | +| Runway | HTTPS URL or data URI | HTTPS URL or data URI | ✅ | +| Luma AI | HTTPS URL only¹ | HTTPS URL only | ❌ | + +¹ Luma documentation says HTTPS URLs; data URI support is undocumented. + +**Assessment**: Most providers accept data URIs or inline base64, making `DataContent` a good abstraction. However, Luma's URL-only requirement means some providers will require an out-of-band upload step. + +## Identified API Gaps + +### Gap 1: Seed / Reproducibility (HIGH PRIORITY) + +**Problem**: 3 of 4 providers support a `seed` parameter for reproducible generation. 
Currently this requires `AdditionalProperties["seed"]`. + +**Recommendation**: Add `int? Seed` to `VideoGenerationOptions`. + +```csharp +/// Seed for reproducible generation. Same seed + same parameters ≈ same output. +public int? Seed { get; set; } +``` + +**Providers**: Google Veo ✅, Runway ✅, Luma ❌, OpenAI ❌ + +--- + +### Gap 2: Aspect Ratio (HIGH PRIORITY) + +**Problem**: Every provider has a concept of aspect ratio (`"16:9"`, `"9:16"`, `"1:1"`, etc.) separate from pixel resolution. The current `VideoSize` property encodes pixel dimensions, but ratio is the primary concept for most providers. Mapping `Size(1280, 720)` → `"16:9"` is lossy and ambiguous. + +**Recommendation**: Add `string? AspectRatio` to `VideoGenerationOptions`. + +```csharp +/// Aspect ratio of the generated video (e.g., "16:9", "9:16", "1:1"). +public string? AspectRatio { get; set; } +``` + +**Providers**: Google Veo ✅, Runway ✅, Luma ✅, OpenAI (implicit via Size) + +--- + +### Gap 3: Negative Prompt (MEDIUM PRIORITY) + +**Problem**: Google Veo supports `negativePrompt` to exclude unwanted elements. This is a concept that exists broadly in image generation (Stable Diffusion, DALL-E) and may appear in more video providers. + +**Recommendation**: Add `string? NegativePrompt` to `VideoGenerationRequest`. + +```csharp +/// Describes what to avoid in the generated video. +public string? NegativePrompt { get; set; } +``` + +**Providers**: Google Veo ✅ (others may add support) + +--- + +### Gap 4: Reference Images with Purpose (MEDIUM PRIORITY) + +**Problem**: Google Veo supports up to 3 reference images, each with a `referenceType` ("STYLE" or "SUBJECT"). Runway's video-to-video supports reference images for style transfer. The current `OriginalMedia` collection doesn't distinguish between "this is the source image for image-to-video" vs "this is a style reference". 
+
+**Recommendation**: Consider a typed reference media collection, or a new property:
+
+```csharp
+/// Reference images for guiding style, subject, or other attributes of the generation.
+public IList<ReferenceMedia>? ReferenceMedia { get; set; }
+
+public class ReferenceMedia
+{
+    public AIContent Content { get; set; }
+    public string? ReferenceType { get; set; } // "style", "subject", etc.
+}
+```
+
+**Providers**: Google Veo ✅ (3 refs, typed), Runway ✅ (1 ref for v2v)
+
+---
+
+### Gap 5: Audio Generation (LOW-MEDIUM PRIORITY)
+
+**Problem**: Google Veo 3+ generates synchronized audio with video natively. Luma and Runway offer separate audio endpoints. As video AI evolves, audio-with-video will likely become standard.
+
+**Recommendation**: Add `bool? GenerateAudio` to `VideoGenerationOptions`.
+
+```csharp
+/// Whether to generate synchronized audio alongside the video.
+public bool? GenerateAudio { get; set; }
+```
+
+Alternatively, this could be modeled as part of `MediaType` (e.g., `"video/mp4; codecs=avc1,mp4a"`) but that's less ergonomic.
+
+**Providers**: Google Veo ✅ (native), Luma ✅ (separate endpoint), Runway ✅ (separate endpoint)
+
+---
+
+### Gap 6: Keyframe / Interpolation (LOW PRIORITY)
+
+**Problem**: Both Luma and Google Veo support first+last frame interpolation — providing a start and end image and generating the video in between. The current API only models "input media" without positional semantics.
+
+**Current workaround**: Send two images in `OriginalMedia` and the provider implementation knows first = frame0, second = frame1.
+
+**Recommendation**: No immediate API change needed. The `OriginalMedia` collection with provider convention (first item = first frame, second = last frame) is workable. Could add `string? FramePosition` to a future `ReferenceMedia` type.
+
+---
+
+### Gap 7: Reverse Extend (LOW PRIORITY)
+
+**Problem**: Luma supports extending a video backwards (generating content leading up to the existing video).
This is conceptually different from forward extension. + +**Recommendation**: Consider adding `ReverseExtend` to `VideoOperationKind`, or leave as `AdditionalProperties`. + +--- + +### Gap 8: Looping Video (LOW PRIORITY) + +**Problem**: Luma supports `loop: true` to generate seamlessly looping video. + +**Recommendation**: Leave as `AdditionalProperties["loop"]` unless more providers add support. + +--- + +### Gap 9: Content Moderation / Safety Parameters (LOW PRIORITY) + +**Problem**: Both Runway (`publicFigureThreshold`) and Google Veo (`personGeneration`) have provider-specific content moderation controls. These are safety parameters rather than creative controls. + +**Recommendation**: Leave as `AdditionalProperties` — these are inherently provider-specific policies. + +--- + +## Problems Encountered During Implementation + +### 1. Runway's Separate Endpoints + +Runway uses three separate endpoints (`text_to_video`, `image_to_video`, `video_to_video`) rather than a single unified endpoint. The `IVideoGenerator.GenerateAsync` single-method approach requires the implementation to inspect `OriginalMedia` content types to determine which endpoint to call. This works but adds complexity. + +### 2. Luma's URL-Only Image Input + +Luma requires HTTPS URLs for images — it doesn't accept data URIs or inline base64. Implementations targeting Luma need an upload step before generation, which is outside the scope of `GenerateAsync`. The `UriContent` type helps, but `DataContent` users will need pre-upload. + +### 3. Google Veo's Extension Model + +Veo video extension requires uploading the source video through the Gemini Files API first, then referencing it. A simple `SourceVideoId` string is insufficient for the multi-step extension workflow. The extension operation also has limitations (720p only, 7s segments, up to 20 times). + +### 4. 
Ratio vs Size Ambiguity + +Every provider has a different approach to sizing: +- **OpenAI**: Width × Height pixels (e.g., 1280×720) +- **Google Veo**: Named resolution string + optional aspect ratio +- **Runway**: Fixed ratio strings (e.g., `"1280:720"`, `"1104:832"`) +- **Luma**: Named resolution (540p/720p/1080p/4k) + optional aspect ratio + +The `Size VideoSize` property maps well to OpenAI but requires lossy conversion for others. Adding `AspectRatio` as a separate property would help significantly. + +### 5. Duration Representation + +All providers handle duration differently: +- **OpenAI**: String enum (`"4"`, `"8"`, `"12"`) +- **Google Veo**: String enum (`"4"`, `"6"`, `"8"`) +- **Runway**: Integer (2–10) +- **Luma**: String with unit (`"5s"`) + +`TimeSpan Duration` is a good neutral abstraction, but the valid values are provider- and model-specific. Documentation should make clear that providers snap to supported values. + +## Summary of Recommendations + +| Priority | Recommendation | Rationale | +|---|---|---| +| **HIGH** | Add `string? AspectRatio` to `VideoGenerationOptions` | Universal concept across all providers, lossy via `VideoSize` alone | +| **HIGH** | Add `int? Seed` to `VideoGenerationOptions` | 3 of 4 providers support it, common for iterative creative workflows | +| **MEDIUM** | Add `string? NegativePrompt` to `VideoGenerationRequest` | Proven concept from image gen; Veo supports it, others likely will | +| **MEDIUM** | Add typed reference media concept | Veo + Runway use reference images with purpose; different from input media | +| **LOW-MED** | Add `bool? GenerateAudio` to `VideoGenerationOptions` | Growing trend for integrated audio; 3 providers offer it in some form | +| **LOW** | Consider `ReverseExtend` in `VideoOperationKind` | Luma-specific for now, but a useful concept for storytelling | + +## What Works Well + +- **`VideoGenerationOperation` pattern**: The submit → poll → download lifecycle maps perfectly to all four providers. 
+- **`VideoOperationKind` enum**: Create/Edit/Extend covers the core operations well. +- **`OriginalMedia` collection**: Handles image-to-video input for all providers. +- **`AdditionalProperties` escape hatch**: Provider-specific features (concepts, camera motion, content moderation) flow through cleanly. +- **`GetService()` pattern**: Enables provider-specific extensions (like OpenAI's `UploadVideoCharacterAsync`) without polluting the interface. +- **`VideoGenerationResponseFormat`**: Uri vs Data choice is useful for all providers. +- **`TimeSpan Duration`**: Clean neutral type that each provider maps to its own format. diff --git a/samples/VideoProviders/Runway/Program.cs b/samples/VideoProviders/Runway/Program.cs new file mode 100644 index 00000000000..23e981ca7b9 --- /dev/null +++ b/samples/VideoProviders/Runway/Program.cs @@ -0,0 +1,230 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +// Runway Video Generation Sample +// +// Usage examples: +// dotnet run -- text-to-video "A cute bunny hopping in a meadow" +// dotnet run -- image-to-video "The scene comes alive" --image bunny.jpg +// dotnet run -- video-to-video "Add easter elements" --video https://example.com/bunny.mp4 +// +// Environment: +// RUNWAY_API_KEY — your Runway API key from https://dev.runwayml.com/ + +using System.CommandLine; +using Microsoft.Extensions.AI; +using Runway; + +var modelOption = new Option("--model", () => "gen4_turbo", "Model (gen4.5, gen4_turbo, gen4_aleph, veo3.1, veo3)."); +var outputOption = new Option("--output", "Output file path (.mp4)."); +var durationOption = new Option("--duration", "Duration in seconds (2-10)."); +var seedOption = new Option("--seed", "Seed for reproducibility."); +var formatOption = new Option("--format", () => "data", "Response format: data or uri."); + +// ── text-to-video ─────────────────────────────────────────────────────────── +var t2vPromptArg = 
new Argument("prompt", "Text prompt."); +var t2vRatioOption = new Option("--ratio", () => "1280:720", "Output ratio (1280:720, 720:1280)."); + +var t2vCommand = new Command("text-to-video", "Generate video from text only.") +{ + t2vPromptArg, modelOption, outputOption, durationOption, seedOption, t2vRatioOption, formatOption, +}; + +t2vCommand.SetHandler(async (context) => +{ + string prompt = context.ParseResult.GetValueForArgument(t2vPromptArg); + string model = context.ParseResult.GetValueForOption(modelOption)!; + string? outputPath = context.ParseResult.GetValueForOption(outputOption); + int? duration = context.ParseResult.GetValueForOption(durationOption); + int? seed = context.ParseResult.GetValueForOption(seedOption); + string format = context.ParseResult.GetValueForOption(formatOption)!; + + using var generator = CreateGenerator(model); + var options = BuildOptions(model, duration, seed, format); + var request = new VideoGenerationRequest(prompt); + var operation = await generator.GenerateAsync(request, options); + await CompleteAndSaveAsync(operation, options, outputPath); +}); + +// ── image-to-video ────────────────────────────────────────────────────────── +var i2vPromptArg = new Argument("prompt", "Text prompt to describe the video."); +var i2vImageOption = new Option("--image", "Input image (file path or HTTPS URL).") { IsRequired = true }; + +var i2vCommand = new Command("image-to-video", "Generate video from an image + prompt.") +{ + i2vPromptArg, i2vImageOption, modelOption, outputOption, durationOption, seedOption, formatOption, +}; + +i2vCommand.SetHandler(async (context) => +{ + string prompt = context.ParseResult.GetValueForArgument(i2vPromptArg); + string imagePath = context.ParseResult.GetValueForOption(i2vImageOption)!; + string model = context.ParseResult.GetValueForOption(modelOption)!; + string? outputPath = context.ParseResult.GetValueForOption(outputOption); + int? 
duration = context.ParseResult.GetValueForOption(durationOption); + int? seed = context.ParseResult.GetValueForOption(seedOption); + string format = context.ParseResult.GetValueForOption(formatOption)!; + + using var generator = CreateGenerator(model); + + List media; + if (imagePath.StartsWith("http", StringComparison.OrdinalIgnoreCase)) + { + media = [new UriContent(new Uri(imagePath), "image/jpeg")]; + } + else + { + media = [await DataContent.LoadFromAsync(imagePath)]; + } + + var options = BuildOptions(model, duration, seed, format); + var request = new VideoGenerationRequest(prompt, media); + var operation = await generator.GenerateAsync(request, options); + await CompleteAndSaveAsync(operation, options, outputPath); +}); + +// ── video-to-video ────────────────────────────────────────────────────────── +var v2vPromptArg = new Argument("prompt", "Prompt describing the style transfer."); +var v2vVideoOption = new Option("--video", "Source video (file path or HTTPS URL).") { IsRequired = true }; +var v2vRefImageOption = new Option("--reference", "Reference image for style guidance."); + +var v2vCommand = new Command("video-to-video", "Transform a video with gen4_aleph.") +{ + v2vPromptArg, v2vVideoOption, v2vRefImageOption, outputOption, seedOption, formatOption, +}; + +v2vCommand.SetHandler(async (context) => +{ + string prompt = context.ParseResult.GetValueForArgument(v2vPromptArg); + string videoPath = context.ParseResult.GetValueForOption(v2vVideoOption)!; + string? refImage = context.ParseResult.GetValueForOption(v2vRefImageOption); + string? outputPath = context.ParseResult.GetValueForOption(outputOption); + int? 
seed = context.ParseResult.GetValueForOption(seedOption); + string format = context.ParseResult.GetValueForOption(formatOption)!; + + using var generator = CreateGenerator("gen4_aleph"); + + List media; + if (videoPath.StartsWith("http", StringComparison.OrdinalIgnoreCase)) + { + media = [new UriContent(new Uri(videoPath), "video/mp4")]; + } + else + { + media = [await DataContent.LoadFromAsync(videoPath)]; + } + + var options = BuildOptions("gen4_aleph", null, seed, format); + + // Add reference image if provided + if (refImage is not null) + { + var refs = new System.Text.Json.Nodes.JsonArray + { + new System.Text.Json.Nodes.JsonObject + { + ["type"] = "image", + ["uri"] = refImage.StartsWith("http", StringComparison.OrdinalIgnoreCase) + ? refImage + : $"data:image/png;base64,{Convert.ToBase64String(await File.ReadAllBytesAsync(refImage))}", + }, + }; + options.AdditionalProperties ??= []; + options.AdditionalProperties["references"] = refs; + } + + var request = new VideoGenerationRequest(prompt, media) + { + OperationKind = VideoOperationKind.Edit, + }; + + var operation = await generator.GenerateAsync(request, options); + await CompleteAndSaveAsync(operation, options, outputPath); +}); + +// ── Root ──────────────────────────────────────────────────────────────────── +var rootCommand = new RootCommand("Runway video generation sample using MEAI IVideoGenerator.") +{ + t2vCommand, + i2vCommand, + v2vCommand, +}; + +return await rootCommand.InvokeAsync(args); + +// ═══════════════════════════════════════════════════════════════════════════ +// Helpers +// ═══════════════════════════════════════════════════════════════════════════ +static IVideoGenerator CreateGenerator(string model) +{ + string? 
apiKey = Environment.GetEnvironmentVariable("RUNWAY_API_KEY"); + if (string.IsNullOrEmpty(apiKey)) + { + Console.Error.WriteLine("Error: Set the RUNWAY_API_KEY environment variable."); + Console.Error.WriteLine("Get a key at https://dev.runwayml.com/"); + Environment.Exit(1); + } + + return new RunwayVideoGenerator(apiKey, model); +} + +static VideoGenerationOptions BuildOptions(string model, int? duration, int? seed, string format) +{ + var options = new VideoGenerationOptions + { + ModelId = model, + ResponseFormat = string.Equals(format, "uri", StringComparison.OrdinalIgnoreCase) + ? VideoGenerationResponseFormat.Uri + : VideoGenerationResponseFormat.Data, + }; + + if (duration.HasValue) + { + options.Duration = TimeSpan.FromSeconds(duration.Value); + } + + if (seed.HasValue) + { + options.Seed = seed.Value; + } + + return options; +} + +static async Task CompleteAndSaveAsync(VideoGenerationOperation operation, VideoGenerationOptions options, string? outputPath) +{ + Console.WriteLine($"TASK_ID: {operation.OperationId}"); + Console.WriteLine($" Status: {operation.Status}"); + + var sw = System.Diagnostics.Stopwatch.StartNew(); + await operation.WaitForCompletionAsync( + new Progress(p => + Console.WriteLine($" Progress: {p.Status}{(p.PercentComplete.HasValue ? $" ({p.PercentComplete}%)" : "")}"))); + + sw.Stop(); + Console.WriteLine($" Completed in {sw.Elapsed.TotalSeconds:F1}s"); + + var contents = await operation.GetContentsAsync(options); + Console.WriteLine($" {contents.Count} content item(s)"); + + for (int i = 0; i < contents.Count; i++) + { + switch (contents[i]) + { + case DataContent dc when outputPath is not null: + Directory.CreateDirectory(Path.GetDirectoryName(outputPath) ?? 
"."); + await dc.SaveToAsync(outputPath); + Console.WriteLine($" [{i}] Saved: {outputPath} ({dc.Data.Length} bytes)"); + break; + case DataContent dc: + Console.WriteLine($" [{i}] DataContent: {dc.Data.Length} bytes ({dc.MediaType})"); + break; + case UriContent uc: + Console.WriteLine($" [{i}] URI: {uc.Uri}"); + break; + default: + Console.WriteLine($" [{i}] {contents[i].GetType().Name}"); + break; + } + } +} diff --git a/samples/VideoProviders/Runway/README.md b/samples/VideoProviders/Runway/README.md new file mode 100644 index 00000000000..519e603f734 --- /dev/null +++ b/samples/VideoProviders/Runway/README.md @@ -0,0 +1,63 @@ +# Runway Video Generation Sample + +This sample demonstrates using the **Microsoft.Extensions.AI** `IVideoGenerator` abstraction with Runway's generative video API. + +## Getting Access + +1. Go to [https://dev.runwayml.com/](https://dev.runwayml.com/) and sign in +2. Navigate to **API Keys** and create a new key +3. Runway requires a paid plan with credits — see [pricing](https://runwayml.com/pricing) +4. 
API version header `X-Runway-Version: 2024-11-06` is required on all requests + +## Environment Setup + +```bash +export RUNWAY_API_KEY="rw_xxxx" +``` + +## Models + +| Model | ID | Capabilities | Credits/sec | +|---|---|---|---| +| Gen-4.5 | `gen4.5` | Text-to-video only | 12 | +| Gen-4 Turbo | `gen4_turbo` | Text-to-video, image-to-video | 4 | +| Gen-4 Aleph | `gen4_aleph` | Video-to-video only | 4 | +| Veo 3.1 | `veo3.1` | Text-to-video (via Runway) | 4 | +| Veo 3 | `veo3` | Text-to-video (via Runway) | 4 | + +## Supported Operations + +| Operation | MEAI Mapping | Endpoint | +|---|---|---| +| Text-to-video | `VideoOperationKind.Create`, no `OriginalMedia` | `POST /v1/text_to_video` | +| Image-to-video | `VideoOperationKind.Create` + `OriginalMedia` (image) | `POST /v1/image_to_video` | +| Video-to-video | `VideoOperationKind.Edit` + `OriginalMedia` (video) | `POST /v1/video_to_video` | + +## Usage + +```bash +# Text-to-video +dotnet run -- text-to-video "A cute bunny hopping in a meadow" --output bunny.mp4 + +# Image-to-video +dotnet run -- image-to-video "The scene comes alive" --image bunny.jpg --duration 10 --output scene.mp4 + +# Video-to-video (gen4_aleph) with style reference +dotnet run -- video-to-video "Add easter elements" --video https://example.com/cats.mp4 --reference style.jpg --output styled.mp4 + +# With seed for reproducibility +dotnet run -- text-to-video "A sunset over mountains" --seed 42 --output sunset.mp4 +``` + +## API Gaps / Limitations + +- **No extend**: Runway does not have an endpoint for extending a completed video. `VideoOperationKind.Extend` cannot be mapped. +- **Separate endpoints**: Runway uses three separate endpoints (`text_to_video`, `image_to_video`, `video_to_video`) requiring the implementation to dispatch based on input media type, rather than a single unified endpoint. +- **Ratio vs Size**: Runway uses fixed ratio strings (`"1280:720"`, `"720:1280"`, etc.) rather than arbitrary pixel dimensions. 
The `VideoSize` → ratio mapping loses information. +- **Character performance** (`act_two`): Runway has a unique `character_performance` endpoint for driving a character with a reference video. This is fundamentally different from OpenAI's character system and has no MEAI equivalent. +- **Seed**: Available via `AdditionalProperties` — consider promoting to a first-class option. +- **Image position**: Runway's `image_to_video` accepts an array of `PromptImages` with `position` (currently only `"first"`). MEAI only models a single image via `OriginalMedia` without position metadata. +- **Duration as integer**: Runway passes duration as an integer (2-10), while OpenAI requires a string enum. The MEAI `TimeSpan Duration` maps cleanly to both. +- **Video-to-video references**: `gen4_aleph` supports `references` (array of image references for style). This is modeled via `AdditionalProperties` but could benefit from a first-class reference images concept. +- **Content moderation**: Runway has `contentModeration.publicFigureThreshold` — provider-specific safety control. +- **No resolution control for v2v**: For video-to-video, the output resolution is determined by the input video. diff --git a/samples/VideoProviders/Runway/Runway.csproj b/samples/VideoProviders/Runway/Runway.csproj new file mode 100644 index 00000000000..552fe62669e --- /dev/null +++ b/samples/VideoProviders/Runway/Runway.csproj @@ -0,0 +1,16 @@ + + + + Exe + net10.0 + enable + enable + $(NoWarn);MEAI001 + + + + + + + + diff --git a/samples/VideoProviders/Runway/RunwayVideoGenerationOperation.cs b/samples/VideoProviders/Runway/RunwayVideoGenerationOperation.cs new file mode 100644 index 00000000000..a1aa8fe78d6 --- /dev/null +++ b/samples/VideoProviders/Runway/RunwayVideoGenerationOperation.cs @@ -0,0 +1,132 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+
+using System.Text.Json;
+using Microsoft.Extensions.AI;
+
+namespace Runway;
+
+/// <summary>
+/// Tracks an in-flight Runway task, polling GET /v1/tasks/{id} for status.
+/// </summary>
+/// <remarks>
+/// Runway task states: PENDING, THROTTLED, RUNNING, SUCCEEDED, FAILED.
+/// The API recommends polling no more than once every 5 seconds.
+/// </remarks>
+internal sealed class RunwayVideoGenerationOperation : VideoGenerationOperation
+{
+    private const string BaseUrl = "https://api.dev.runwayml.com";
+
+    private readonly HttpClient _httpClient;
+    private string? _status;
+    private string? _failureReason;
+    private string? _outputUrl;
+    private int? _progressPercent;
+
+    public RunwayVideoGenerationOperation(string operationId, HttpClient httpClient, string modelId)
+    {
+        OperationId = operationId;
+        ModelId = modelId;
+        _httpClient = httpClient;
+        _status = "PENDING";
+    }
+
+    public override string? OperationId { get; }
+
+    public override string? Status => _status;
+
+    // Prefer the exact progress value reported by the API; otherwise estimate from status.
+    public override int? PercentComplete => _progressPercent ?? _status switch
+    {
+        "SUCCEEDED" => 100,
+        "FAILED" => null,
+        "RUNNING" => 50,
+        "THROTTLED" => 10,
+        _ => 0,
+    };
+
+    public override bool IsCompleted => _status is "SUCCEEDED" or "FAILED";
+
+    public override string? FailureReason => _failureReason;
+
+    /// <summary>Polls GET /v1/tasks/{id} once and refreshes status, progress, failure reason, and output URL.</summary>
+    public override async Task UpdateAsync(CancellationToken cancellationToken = default)
+    {
+        using var response = await _httpClient.GetAsync($"{BaseUrl}/v1/tasks/{OperationId}", cancellationToken);
+        string body = await response.Content.ReadAsStringAsync(cancellationToken);
+        response.EnsureSuccessStatusCode();
+
+        using var doc = JsonDocument.Parse(body);
+        var root = doc.RootElement;
+
+        _status = root.GetProperty("status").GetString();
+
+        if (root.TryGetProperty("failure", out var failure) && failure.ValueKind == JsonValueKind.String)
+        {
+            _failureReason = failure.GetString();
+        }
+
+        if (root.TryGetProperty("progress", out var prog) && prog.TryGetDouble(out double progressVal))
+        {
+            _progressPercent = (int)(progressVal * 100);
+        }
+
+        // Output can be a single URL or an array
+        if (root.TryGetProperty("output", out var output))
+        {
+            if (output.ValueKind == JsonValueKind.Array && output.GetArrayLength() > 0)
+            {
+                _outputUrl = output[0].GetString();
+            }
+            else if (output.ValueKind == JsonValueKind.String)
+            {
+                _outputUrl = output.GetString();
+            }
+        }
+    }
+
+    /// <summary>Polls every 5 seconds until the task reaches SUCCEEDED or FAILED, reporting progress after each poll.</summary>
+    /// <exception cref="InvalidOperationException">The task ended in the FAILED state.</exception>
+    public override async Task WaitForCompletionAsync(
+        IProgress<VideoGenerationProgress>? progress = null,
+        CancellationToken cancellationToken = default)
+    {
+        while (!IsCompleted)
+        {
+            await Task.Delay(TimeSpan.FromSeconds(5), cancellationToken);
+            await UpdateAsync(cancellationToken);
+            progress?.Report(new VideoGenerationProgress(_status, PercentComplete));
+        }
+
+        if (_status == "FAILED")
+        {
+            throw new InvalidOperationException($"Video generation failed: {_failureReason}");
+        }
+    }
+
+    /// <summary>
+    /// Returns the generated video: as a <see cref="UriContent"/> when <see cref="VideoGenerationResponseFormat.Uri"/>
+    /// is requested, otherwise downloads the bytes into a <see cref="DataContent"/>.
+    /// </summary>
+    /// <exception cref="InvalidOperationException">The task has not completed successfully or produced no output URL.</exception>
+    public override async Task<IList<AIContent>> GetContentsAsync(
+        VideoGenerationOptions? options = null,
+        CancellationToken cancellationToken = default)
+    {
+        if (!IsCompleted || _status == "FAILED")
+        {
+            throw new InvalidOperationException("The operation has not completed successfully.");
+        }
+
+        if (_outputUrl is null)
+        {
+            await UpdateAsync(cancellationToken);
+        }
+
+        if (_outputUrl is null)
+        {
+            throw new InvalidOperationException("No output URL available after completion.");
+        }
+
+        if (options?.ResponseFormat == VideoGenerationResponseFormat.Uri)
+        {
+            return [new UriContent(new Uri(_outputUrl), "video/mp4")];
+        }
+
+        using var response = await _httpClient.GetAsync(_outputUrl, cancellationToken);
+        response.EnsureSuccessStatusCode();
+        byte[] data = await response.Content.ReadAsByteArrayAsync(cancellationToken);
+        return [new DataContent(data, "video/mp4")];
+    }
+}
diff --git a/samples/VideoProviders/Runway/RunwayVideoGenerator.cs b/samples/VideoProviders/Runway/RunwayVideoGenerator.cs
new file mode 100644
index 00000000000..4cb2cb72aef
--- /dev/null
+++ b/samples/VideoProviders/Runway/RunwayVideoGenerator.cs
@@ -0,0 +1,300 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Drawing;
+using System.Net.Http.Headers;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Microsoft.Extensions.AI;
+
+namespace Runway;
+
+/// <summary>
+/// Implements <see cref="IVideoGenerator"/> for the Runway API.
+/// Supports text-to-video, image-to-video, and video-to-video (gen4_aleph).
+/// +/// +/// API Reference: https://docs.dev.runwayml.com/api +/// Endpoints: +/// POST /v1/text_to_video +/// POST /v1/image_to_video +/// POST /v1/video_to_video +/// GET /v1/tasks/{id} +/// +internal sealed class RunwayVideoGenerator : IVideoGenerator +{ + private const string BaseUrl = "https://api.dev.runwayml.com"; + private const string ApiVersion = "2024-11-06"; + private readonly HttpClient _httpClient; + private readonly string _modelId; + + public RunwayVideoGenerator(string apiKey, string modelId = "gen4_turbo", HttpClient? httpClient = null) + { + _modelId = modelId; + _httpClient = httpClient ?? new HttpClient(); + _httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", apiKey); + _httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); + _httpClient.DefaultRequestHeaders.Add("X-Runway-Version", ApiVersion); + } + + public async Task GenerateAsync( + VideoGenerationRequest request, + VideoGenerationOptions? options = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + string model = options?.ModelId ?? 
_modelId; + string endpoint; + JsonObject body; + + // Determine which endpoint to use based on operation kind and media + if (request.OperationKind == VideoOperationKind.Edit && HasVideoMedia(request.OriginalMedia)) + { + // Video-to-video (gen4_aleph only) + endpoint = "/v1/video_to_video"; + body = BuildVideoToVideoBody(request, model, options); + } + else if (HasImageMedia(request.OriginalMedia)) + { + // Image-to-video + endpoint = "/v1/image_to_video"; + body = BuildImageToVideoBody(request, model, options); + } + else + { + // Text-to-video + endpoint = "/v1/text_to_video"; + body = BuildTextToVideoBody(request, model, options); + } + + string json = body.ToJsonString(); + using var content = new StringContent(json, System.Text.Encoding.UTF8, "application/json"); + using var response = await _httpClient.PostAsync($"{BaseUrl}{endpoint}", content, cancellationToken); + + string responseBody = await response.Content.ReadAsStringAsync(cancellationToken); + response.EnsureSuccessStatusCode(); + + var result = JsonDocument.Parse(responseBody); + string taskId = result.RootElement.GetProperty("id").GetString()!; + + return new RunwayVideoGenerationOperation(taskId, _httpClient, model); + } + + public object? GetService(Type serviceType, object? serviceKey = null) + { + if (serviceKey is null && serviceType.IsInstanceOfType(this)) + { + return this; + } + + return null; + } + + public void Dispose() => _httpClient.Dispose(); + + private static JsonObject BuildTextToVideoBody(VideoGenerationRequest request, string model, VideoGenerationOptions? options) + { + var body = new JsonObject + { + ["model"] = model, + ["promptText"] = request.Prompt ?? string.Empty, + ["ratio"] = options?.AspectRatio is { } ar ? 
MapAspectRatio(ar) : MapRatio(options?.VideoSize), + }; + + if (options?.Duration is { } duration) + { + body["duration"] = (int)duration.TotalSeconds; + } + + AddSeed(body, options); + return body; + } + + private static JsonObject BuildImageToVideoBody(VideoGenerationRequest request, string model, VideoGenerationOptions? options) + { + string? imageUri = GetFirstImageUri(request.OriginalMedia); + + var body = new JsonObject + { + ["model"] = model, + ["promptText"] = request.Prompt ?? string.Empty, + ["promptImage"] = imageUri ?? string.Empty, + ["ratio"] = options?.AspectRatio is { } ar ? MapAspectRatio(ar) : MapRatioImageToVideo(options?.VideoSize), + }; + + if (options?.Duration is { } duration) + { + body["duration"] = (int)duration.TotalSeconds; + } + + AddSeed(body, options); + return body; + } + + private static JsonObject BuildVideoToVideoBody(VideoGenerationRequest request, string model, VideoGenerationOptions? options) + { + string? videoUri = GetFirstVideoUri(request.OriginalMedia); + + var body = new JsonObject + { + ["model"] = "gen4_aleph", // video-to-video only supports gen4_aleph + ["promptText"] = request.Prompt ?? string.Empty, + ["videoUri"] = videoUri ?? string.Empty, + }; + + // Reference images for style transfer + if (options?.AdditionalProperties?.TryGetValue("references", out object? refs) == true && refs is JsonArray refsArray) + { + body["references"] = JsonNode.Parse(refsArray.ToJsonString())!; + } + + AddSeed(body, options); + return body; + } + + private static void AddSeed(JsonObject body, VideoGenerationOptions? options) + { + // Prefer first-class Seed property, fall back to AdditionalProperties + if (options?.Seed is int seed) + { + body["seed"] = seed; + } + else if (options?.AdditionalProperties?.TryGetValue("seed", out object? seedObj) == true && seedObj is int seedInt) + { + body["seed"] = seedInt; + } + } + + private static string? GetFirstImageUri(IEnumerable? 
media) + { + if (media is null) + { + return null; + } + + foreach (var item in media) + { + if (item is UriContent uc && uc.Uri is not null && (uc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? true)) + { + return uc.Uri.ToString(); + } + + if (item is DataContent dc && dc.Data.Length > 0 && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? true)) + { + // Runway accepts data URIs for images + return dc.Uri ?? $"data:{dc.MediaType ?? "image/png"};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; + } + } + + return null; + } + + private static string? GetFirstVideoUri(IEnumerable? media) + { + if (media is null) + { + return null; + } + + foreach (var item in media) + { + if (item is UriContent uc && uc.Uri is not null && (uc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) ?? true)) + { + return uc.Uri.ToString(); + } + + if (item is DataContent dc && dc.Data.Length > 0 && (dc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) ?? false)) + { + return dc.Uri ?? $"data:{dc.MediaType};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; + } + } + + return null; + } + + private static bool HasImageMedia(IEnumerable? media) + { + if (media is null) + { + return false; + } + + foreach (var item in media) + { + // Recognize both UriContent and DataContent so routing agrees with GetFirstImageUri (which accepts both). + if ((item is UriContent uc && (uc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? false)) || (item is DataContent dc && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? false))) + { + return true; + } + } + + return false; + } + + private static bool HasVideoMedia(IEnumerable? media) + { + if (media is null) + { + return false; + } + + foreach (var item in media) + { + // Recognize both UriContent and DataContent so edit requests with a video URI route to /v1/video_to_video (matches GetFirstVideoUri). + if ((item is UriContent uc && (uc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) ?? false)) || (item is DataContent dc && (dc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) ?? false))) + { + return true; + } + } + + return false; + } + + private static string MapRatio(Size? size) + { + if (size is null) + { + return "1280:720"; + } + + double ratio = (double)size.Value.Width / size.Value.Height; + return ratio > 1.5 ? 
"1280:720" : "720:1280"; + } + + private static string MapRatioImageToVideo(Size? size) + { + if (size is null) + { + return "1280:720"; + } + + // Runway image-to-video supports more ratios + double ratio = (double)size.Value.Width / size.Value.Height; + if (ratio > 2.0) + { + return "1584:672"; + } + + if (ratio > 1.2) + { + return "1280:720"; + } + + if (ratio > 0.9) + { + return "960:960"; + } + + return "720:1280"; + } + + /// Maps an aspect ratio string like "16:9" to a Runway ratio string like "1280:720". + private static string MapAspectRatio(string aspectRatio) => aspectRatio switch + { + "16:9" => "1280:720", + "9:16" => "720:1280", + "1:1" => "960:960", + "4:3" => "1104:832", + "3:4" => "832:1104", + _ => aspectRatio.Replace(':', ':'), // pass through as-is if already in Runway format + }; +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOptions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOptions.cs index 0375c856b03..9e23eea6a82 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOptions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationOptions.cs @@ -27,16 +27,24 @@ protected VideoGenerationOptions(VideoGenerationOptions? other) } AdditionalProperties = other.AdditionalProperties?.Clone(); + AspectRatio = other.AspectRatio; Count = other.Count; Duration = other.Duration; FramesPerSecond = other.FramesPerSecond; + GenerateAudio = other.GenerateAudio; MediaType = other.MediaType; ModelId = other.ModelId; RawRepresentationFactory = other.RawRepresentationFactory; ResponseFormat = other.ResponseFormat; + Seed = other.Seed; VideoSize = other.VideoSize; } + /// + /// Gets or sets the desired aspect ratio for the generated video (e.g., "16:9", "9:16", "1:1"). + /// + public string? AspectRatio { get; set; } + /// /// Gets or sets the number of videos to generate. 
/// @@ -55,6 +63,11 @@ protected VideoGenerationOptions(VideoGenerationOptions? other) ///
public int? FramesPerSecond { get; set; } + /// + /// Gets or sets whether to generate synchronized audio alongside the video. + /// + public bool? GenerateAudio { get; set; } + /// /// Gets or sets the media type (also known as MIME type) of the generated video. /// @@ -89,6 +102,11 @@ protected VideoGenerationOptions(VideoGenerationOptions? other) /// public VideoGenerationResponseFormat? ResponseFormat { get; set; } + /// + /// Gets or sets a seed value for reproducible generation. + /// + public int? Seed { get; set; } + /// /// Gets or sets the size (resolution) of the generated video. /// @@ -113,9 +131,6 @@ protected VideoGenerationOptions(VideoGenerationOptions? other) /// /// Represents the requested response format of the generated video. /// -/// -/// Not all implementations support all response formats and this value might be ignored by the implementation if not supported. -/// [Experimental(DiagnosticIds.Experiments.AIVideoGeneration, UrlFormat = DiagnosticIds.UrlFormat)] public enum VideoGenerationResponseFormat { diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs index 998d12cb59e..53bbcec1b5b 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs @@ -35,6 +35,9 @@ public VideoGenerationRequest(string prompt, IEnumerable? originalMed /// Gets or sets the prompt to guide the video generation. public string? Prompt { get; set; } + /// Gets or sets a negative prompt describing what to avoid in the generated video. + public string? NegativePrompt { get; set; } + /// Gets or sets the kind of video operation to perform. /// /// Defaults to . 
Set to or diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationOptionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationOptionsTests.cs index e13a8c619e2..2b092ee79b7 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationOptionsTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGenerationOptionsTests.cs @@ -14,13 +14,16 @@ public class VideoGenerationOptionsTests public void Constructor_Defaults() { var options = new VideoGenerationOptions(); + Assert.Null(options.AspectRatio); Assert.Null(options.Count); Assert.Null(options.Duration); Assert.Null(options.FramesPerSecond); + Assert.Null(options.GenerateAudio); Assert.Null(options.MediaType); Assert.Null(options.ModelId); Assert.Null(options.RawRepresentationFactory); Assert.Null(options.ResponseFormat); + Assert.Null(options.Seed); Assert.Null(options.VideoSize); Assert.Null(options.AdditionalProperties); } @@ -30,22 +33,28 @@ public void Properties_Roundtrip() { var options = new VideoGenerationOptions { + AspectRatio = "16:9", Count = 3, Duration = TimeSpan.FromSeconds(15), FramesPerSecond = 30, + GenerateAudio = true, MediaType = "video/webm", ModelId = "sora", ResponseFormat = VideoGenerationResponseFormat.Data, + Seed = 42, VideoSize = new Size(1280, 720), AdditionalProperties = new() { ["key"] = "value" }, }; + Assert.Equal("16:9", options.AspectRatio); Assert.Equal(3, options.Count); Assert.Equal(TimeSpan.FromSeconds(15), options.Duration); Assert.Equal(30, options.FramesPerSecond); + Assert.True(options.GenerateAudio); Assert.Equal("video/webm", options.MediaType); Assert.Equal("sora", options.ModelId); Assert.Equal(VideoGenerationResponseFormat.Data, options.ResponseFormat); + Assert.Equal(42, options.Seed); Assert.Equal(new Size(1280, 720), options.VideoSize); Assert.Equal("value", options.AdditionalProperties["key"]); } @@ -55,12 +64,15 @@ public void 
Clone_CreatesIndependentCopy() { var original = new VideoGenerationOptions { + AspectRatio = "9:16", Count = 2, Duration = TimeSpan.FromSeconds(5), FramesPerSecond = 24, + GenerateAudio = true, MediaType = "video/mp4", ModelId = "model-1", ResponseFormat = VideoGenerationResponseFormat.Uri, + Seed = 123, VideoSize = new Size(1920, 1080), AdditionalProperties = new() { ["key"] = "value" }, }; @@ -68,12 +80,15 @@ public void Clone_CreatesIndependentCopy() var clone = original.Clone(); Assert.NotSame(original, clone); + Assert.Equal(original.AspectRatio, clone.AspectRatio); Assert.Equal(original.Count, clone.Count); Assert.Equal(original.Duration, clone.Duration); Assert.Equal(original.FramesPerSecond, clone.FramesPerSecond); + Assert.Equal(original.GenerateAudio, clone.GenerateAudio); Assert.Equal(original.MediaType, clone.MediaType); Assert.Equal(original.ModelId, clone.ModelId); Assert.Equal(original.ResponseFormat, clone.ResponseFormat); + Assert.Equal(original.Seed, clone.Seed); Assert.Equal(original.VideoSize, clone.VideoSize); Assert.NotSame(original.AdditionalProperties, clone.AdditionalProperties); } @@ -82,9 +97,12 @@ public void Clone_CreatesIndependentCopy() public void Clone_FromNull_ReturnsDefaults() { var options = new DerivedVideoGenerationOptions(null); + Assert.Null(options.AspectRatio); Assert.Null(options.Count); Assert.Null(options.Duration); + Assert.Null(options.GenerateAudio); Assert.Null(options.ModelId); + Assert.Null(options.Seed); } [Theory] @@ -102,11 +120,14 @@ public void JsonSerialization_Roundtrip() { var options = new VideoGenerationOptions { + AspectRatio = "1:1", Count = 2, Duration = TimeSpan.FromSeconds(10), FramesPerSecond = 24, + GenerateAudio = true, MediaType = "video/mp4", ModelId = "test-model", + Seed = 99, VideoSize = new Size(640, 480), ResponseFormat = VideoGenerationResponseFormat.Data, AdditionalProperties = new() { ["custom"] = "prop" }, @@ -116,10 +137,13 @@ public void JsonSerialization_Roundtrip() var deserialized 
= JsonSerializer.Deserialize(json, AIJsonUtilities.DefaultOptions); Assert.NotNull(deserialized); - Assert.Equal(options.Count, deserialized!.Count); + Assert.Equal(options.AspectRatio, deserialized!.AspectRatio); + Assert.Equal(options.Count, deserialized.Count); + Assert.Equal(options.GenerateAudio, deserialized.GenerateAudio); Assert.Equal(options.MediaType, deserialized.MediaType); Assert.Equal(options.ModelId, deserialized.ModelId); Assert.Equal(options.ResponseFormat, deserialized.ResponseFormat); + Assert.Equal(options.Seed, deserialized.Seed); } private class DerivedVideoGenerationOptions : VideoGenerationOptions From 2d73fcf3604454d188d9247c2596aa501dc412eb Mon Sep 17 00:00:00 2001 From: "Eric St. John" Date: Thu, 26 Mar 2026 23:35:49 -0700 Subject: [PATCH 08/10] Fix google veo demo --- .../GoogleVeoVideoGenerationOperation.cs | 15 ++-- .../GoogleVeo/GoogleVeoVideoGenerator.cs | 79 ++++++++++--------- .../MultiProviderPOC/Program.cs | 62 ++++++++++----- .../MultiProviderPOC/demo-multi-provider.ps1 | 17 ++-- 4 files changed, 102 insertions(+), 71 deletions(-) diff --git a/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerationOperation.cs b/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerationOperation.cs index e61cdde97a8..92db61ebd65 100644 --- a/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerationOperation.cs +++ b/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerationOperation.cs @@ -65,14 +65,16 @@ public override async Task UpdateAsync(CancellationToken cancellationToken = def { _status = "SUCCEEDED"; - // Parse generated videos + // Parse generated videos — predictLongRunning response format: + // response.generateVideoResponse.generatedSamples[].video.uri if (root.TryGetProperty("response", out var resp) && - resp.TryGetProperty("generatedVideos", out var videos)) + resp.TryGetProperty("generateVideoResponse", out var videoResponse) && + videoResponse.TryGetProperty("generatedSamples", out var samples)) { _videoUris.Clear(); - foreach 
(var video in videos.EnumerateArray()) + foreach (var sample in samples.EnumerateArray()) { - if (video.TryGetProperty("video", out var videoObj) && + if (sample.TryGetProperty("video", out var videoObj) && videoObj.TryGetProperty("uri", out var uri)) { _videoUris.Add(uri.GetString()!); @@ -126,13 +128,16 @@ public override async Task> GetContentsAsync( var results = new List(); foreach (string videoUri in _videoUris) { + // Append API key to download URI + string downloadUri = videoUri.Contains('?') ? $"{videoUri}&key={_apiKey}" : $"{videoUri}?key={_apiKey}"; + if (options?.ResponseFormat == VideoGenerationResponseFormat.Uri) { results.Add(new UriContent(new Uri(videoUri), "video/mp4")); } else { - using var response = await _httpClient.GetAsync(videoUri, cancellationToken); + using var response = await _httpClient.GetAsync(downloadUri, cancellationToken); response.EnsureSuccessStatusCode(); byte[] data = await response.Content.ReadAsByteArrayAsync(cancellationToken); results.Add(new DataContent(data, "video/mp4")); diff --git a/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerator.cs b/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerator.cs index af8b0b09be1..93adb3545b6 100644 --- a/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerator.cs +++ b/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerator.cs @@ -15,7 +15,7 @@ namespace GoogleVeo; /// /// /// API Reference: https://ai.google.dev/gemini-api/docs/video -/// Endpoint: POST https://generativelanguage.googleapis.com/v1beta/models/{model}:generateVideos +/// Endpoint: POST https://generativelanguage.googleapis.com/v1beta/models/{model}:predictLongRunning /// Polling: GET https://generativelanguage.googleapis.com/v1beta/{operation.name} /// internal sealed class GoogleVeoVideoGenerator : IVideoGenerator @@ -41,12 +41,14 @@ public async Task GenerateAsync( ArgumentNullException.ThrowIfNull(request); string model = options?.ModelId ?? 
_modelId; - var body = new JsonObject(); + + // Build instance object (prompt, image, reference images, last frame, extension) + var instance = new JsonObject(); // Text prompt (required for most operations) if (request.Prompt is not null) { - body["prompt"] = request.Prompt; + instance["prompt"] = request.Prompt; } // Image for image-to-video @@ -55,59 +57,65 @@ public async Task GenerateAsync( var image = GetFirstImageContent(request.OriginalMedia); if (image is not null) { - body["image"] = image; + instance["image"] = image; } } // Reference images (provider-specific via AdditionalProperties) if (options?.AdditionalProperties?.TryGetValue("referenceImages", out object? refImgs) == true && refImgs is JsonArray refArray) { - body["referenceImages"] = JsonNode.Parse(refArray.ToJsonString())!; + instance["referenceImages"] = JsonNode.Parse(refArray.ToJsonString())!; } // Last frame for first+last frame interpolation if (options?.AdditionalProperties?.TryGetValue("lastFrameImage", out object? lastFrame) == true) { - body["lastFrame"] = new JsonObject + instance["lastFrame"] = new JsonObject { ["image"] = BuildImageNode(lastFrame), }; } - // Generation config - var config = new JsonObject(); + // Video extension + if (request.OperationKind == VideoOperationKind.Extend && request.SourceVideoId is not null) + { + instance["extensionSourceVideoId"] = request.SourceVideoId; + } + + // Build the parameters object (generation config) + var parameters = new JsonObject(); if (options?.AdditionalProperties?.TryGetValue("personGeneration", out object? 
personGen) == true && personGen is string personGenStr) { - config["personGeneration"] = personGenStr; + parameters["personGeneration"] = personGenStr; } if (options?.Duration is { } duration) { - config["durationSeconds"] = ((int)duration.TotalSeconds).ToString(); + parameters["durationSeconds"] = (int)duration.TotalSeconds; } if (options?.VideoSize is { } size) { - config["resolution"] = MapResolution(size); + parameters["resolution"] = MapResolution(size); } if (options?.AspectRatio is { } aspectRatio) { - config["aspectRatio"] = aspectRatio; + parameters["aspectRatio"] = aspectRatio; } else if (options?.AdditionalProperties?.TryGetValue("aspectRatio", out object? ar) == true && ar is string arStr) { - config["aspectRatio"] = arStr; + parameters["aspectRatio"] = arStr; } if (options?.AdditionalProperties?.TryGetValue("numberOfVideos", out object? numVids) == true && numVids is int numVidsInt) { - config["numberOfVideos"] = numVidsInt; + parameters["numberOfVideos"] = numVidsInt; } else if (options?.Count is { } count) { - config["numberOfVideos"] = count; + parameters["numberOfVideos"] = count; } // Negative prompt — prefer first-class property on request, fall back to AdditionalProperties @@ -119,48 +127,47 @@ public async Task GenerateAsync( if (negativePrompt is not null) { - config["negativePrompt"] = negativePrompt; + parameters["negativePrompt"] = negativePrompt; } if (options?.GenerateAudio is bool genAudio) { - config["generateAudio"] = genAudio; + parameters["generateAudio"] = genAudio; } else if (options?.AdditionalProperties?.TryGetValue("generateAudio", out object? genAudioObj) == true && genAudioObj is bool genAudioBool) { - config["generateAudio"] = genAudioBool; + parameters["generateAudio"] = genAudioBool; } if (options?.Seed is int seed) { - config["seed"] = seed; + parameters["seed"] = seed; } else if (options?.AdditionalProperties?.TryGetValue("seed", out object? 
seedObj) == true && seedObj is int seedInt) { - config["seed"] = seedInt; + parameters["seed"] = seedInt; } - if (config.Count > 0) + // Wrap in instances/parameters envelope for predictLongRunning + var body = new JsonObject { - body["generationConfig"] = config; - } - - // Video extension uses a different field structure - if (request.OperationKind == VideoOperationKind.Extend && request.SourceVideoId is not null) + ["instances"] = new JsonArray { instance }, + }; + if (parameters.Count > 0) { - // For extend, the sourceVideoId should be a video file URI or inline data - // The Veo API uses the image field for the last frame of the source video - // This is a simplification - real extension requires the Gemini Files API - body["extensionSourceVideoId"] = request.SourceVideoId; + body["parameters"] = parameters; } - string url = $"{BaseUrl}/models/{model}:generateVideos?key={_apiKey}"; + string url = $"{BaseUrl}/models/{model}:predictLongRunning?key={_apiKey}"; string json = body.ToJsonString(); using var content = new StringContent(json, System.Text.Encoding.UTF8, "application/json"); using var response = await _httpClient.PostAsync(url, content, cancellationToken); string responseBody = await response.Content.ReadAsStringAsync(cancellationToken); - response.EnsureSuccessStatusCode(); + if (!response.IsSuccessStatusCode) + { + throw new HttpRequestException($"Google Veo API error {(int)response.StatusCode} ({response.StatusCode}): {responseBody}"); + } var result = JsonDocument.Parse(responseBody); string operationName = result.RootElement.GetProperty("name").GetString()!; @@ -188,7 +195,7 @@ public async Task GenerateAsync( { return new JsonObject { - ["imageBytes"] = Convert.ToBase64String(dc.Data.ToArray()), + ["bytesBase64Encoded"] = Convert.ToBase64String(dc.Data.ToArray()), ["mimeType"] = dc.MediaType, }; } @@ -197,7 +204,7 @@ public async Task GenerateAsync( { return new JsonObject { - ["imageUri"] = uc.Uri.ToString(), + ["gcsUri"] = uc.Uri.ToString(), }; 
} } @@ -212,7 +219,7 @@ private static JsonNode BuildImageNode(object imageData) byte[] bytes = File.ReadAllBytes(path); return new JsonObject { - ["imageBytes"] = Convert.ToBase64String(bytes), + ["bytesBase64Encoded"] = Convert.ToBase64String(bytes), ["mimeType"] = "image/png", }; } @@ -222,7 +229,7 @@ private static JsonNode BuildImageNode(object imageData) return node; } - return new JsonObject { ["imageUri"] = imageData.ToString() }; + return new JsonObject { ["gcsUri"] = imageData.ToString() }; } private static string MapResolution(Size size) diff --git a/samples/VideoProviders/MultiProviderPOC/Program.cs b/samples/VideoProviders/MultiProviderPOC/Program.cs index 8614a9b169d..080a362973d 100644 --- a/samples/VideoProviders/MultiProviderPOC/Program.cs +++ b/samples/VideoProviders/MultiProviderPOC/Program.cs @@ -394,8 +394,10 @@ public async Task GenerateAsync( VideoGenerationRequest request, VideoGenerationOptions? options = null, CancellationToken cancellationToken = default) { string model = options?.ModelId ?? _modelId; - var body = new JsonObject(); - if (request.Prompt is not null) body["prompt"] = request.Prompt; + + // Build the instance object (prompt, image) + var instance = new JsonObject(); + if (request.Prompt is not null) instance["prompt"] = request.Prompt; if (request.OperationKind == VideoOperationKind.Create && request.OriginalMedia is not null) { @@ -403,33 +405,43 @@ public async Task GenerateAsync( { if (item is DataContent dc && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? 
false) && dc.Data.Length > 0) { - body["image"] = new JsonObject { ["imageBytes"] = Convert.ToBase64String(dc.Data.ToArray()), ["mimeType"] = dc.MediaType }; + instance["image"] = new JsonObject { ["bytesBase64Encoded"] = Convert.ToBase64String(dc.Data.ToArray()), ["mimeType"] = dc.MediaType }; break; } if (item is UriContent uc) { - body["image"] = new JsonObject { ["imageUri"] = uc.Uri.ToString() }; + instance["image"] = new JsonObject { ["gcsUri"] = uc.Uri.ToString() }; break; } } } - var config = new JsonObject(); - if (options?.Duration is { } dur) config["durationSeconds"] = ((int)dur.TotalSeconds).ToString(); - if (options?.AspectRatio is { } ar) config["aspectRatio"] = ar; - if (options?.Count is { } cnt) config["numberOfVideos"] = cnt; - if (options?.Seed is int seed) config["seed"] = seed; - if (options?.GenerateAudio == true) config["generateAudio"] = true; - if (request.NegativePrompt is { } neg) config["negativePrompt"] = neg; - if (options?.AdditionalProperties?.TryGetValue("personGeneration", out object? pg) == true && pg is string pgs) config["personGeneration"] = pgs; - if (config.Count > 0) body["generationConfig"] = config; - - string url = $"{BaseUrl}/models/{model}:generateVideos?key={_apiKey}"; + // Build the parameters object (generation config) + var parameters = new JsonObject(); + if (options?.Duration is { } dur) parameters["durationSeconds"] = (int)dur.TotalSeconds; + if (options?.AspectRatio is { } ar) parameters["aspectRatio"] = ar; + if (options?.Count is { } cnt) parameters["numberOfVideos"] = cnt; + if (options?.Seed is int seed) parameters["seed"] = seed; + if (options?.GenerateAudio == true) parameters["generateAudio"] = true; + if (request.NegativePrompt is { } neg) parameters["negativePrompt"] = neg; + if (options?.AdditionalProperties?.TryGetValue("personGeneration", out object? 
pg) == true && pg is string pgs) parameters["personGeneration"] = pgs; + + // Wrap in instances/parameters envelope for predictLongRunning + var body = new JsonObject + { + ["instances"] = new JsonArray { instance }, + }; + if (parameters.Count > 0) body["parameters"] = parameters; + + string url = $"{BaseUrl}/models/{model}:predictLongRunning?key={_apiKey}"; using var content = new StringContent(body.ToJsonString(), System.Text.Encoding.UTF8, "application/json"); using var response = await _httpClient.PostAsync(url, content, cancellationToken); string responseBody = await response.Content.ReadAsStringAsync(cancellationToken); - response.EnsureSuccessStatusCode(); + if (!response.IsSuccessStatusCode) + { + throw new HttpRequestException($"Google Veo API error {(int)response.StatusCode} ({response.StatusCode}): {responseBody}"); + } var result = JsonDocument.Parse(responseBody); string opName = result.RootElement.GetProperty("name").GetString()!; return new GoogleVeoVideoGenerationOperation(opName, _apiKey, _httpClient, model); @@ -466,16 +478,22 @@ public override async Task UpdateAsync(CancellationToken cancellationToken = def { using var resp = await _httpClient.GetAsync($"{BaseUrl}/{OperationId}?key={_apiKey}", cancellationToken); string body = await resp.Content.ReadAsStringAsync(cancellationToken); - resp.EnsureSuccessStatusCode(); + if (!resp.IsSuccessStatusCode) + { + throw new HttpRequestException($"Google Veo poll error {(int)resp.StatusCode} ({resp.StatusCode}): {body}"); + } using var doc = JsonDocument.Parse(body); var root = doc.RootElement; _done = root.TryGetProperty("done", out var d) && d.GetBoolean(); if (_done) _status = "COMPLETED"; if (root.TryGetProperty("error", out var err)) { _failureReason = err.ToString(); _status = "FAILED"; _done = true; } _videoUris.Clear(); - if (root.TryGetProperty("response", out var response) && response.TryGetProperty("generatedVideos", out var vids)) - foreach (var v in vids.EnumerateArray()) - if 
(v.TryGetProperty("video", out var video) && video.TryGetProperty("uri", out var uri)) + // predictLongRunning response: response.generateVideoResponse.generatedSamples[].video.uri + if (root.TryGetProperty("response", out var response) && + response.TryGetProperty("generateVideoResponse", out var videoResponse) && + videoResponse.TryGetProperty("generatedSamples", out var samples)) + foreach (var s in samples.EnumerateArray()) + if (s.TryGetProperty("video", out var video) && video.TryGetProperty("uri", out var uri)) _videoUris.Add(uri.GetString()!); } @@ -498,8 +516,10 @@ public override async Task> GetContentsAsync(VideoGenerationOpt var results = new List(); foreach (var uri in _videoUris) { + // Append API key to download URI + string downloadUri = uri.Contains('?') ? $"{uri}&key={_apiKey}" : $"{uri}?key={_apiKey}"; if (options?.ResponseFormat == VideoGenerationResponseFormat.Uri) { results.Add(new UriContent(new Uri(uri), "video/mp4")); continue; } - using var r = await _httpClient.GetAsync(uri, cancellationToken); r.EnsureSuccessStatusCode(); + using var r = await _httpClient.GetAsync(downloadUri, cancellationToken); r.EnsureSuccessStatusCode(); results.Add(new DataContent(await r.Content.ReadAsByteArrayAsync(cancellationToken), "video/mp4")); } diff --git a/samples/VideoProviders/MultiProviderPOC/demo-multi-provider.ps1 b/samples/VideoProviders/MultiProviderPOC/demo-multi-provider.ps1 index aa033ffa867..e7e19571ca7 100644 --- a/samples/VideoProviders/MultiProviderPOC/demo-multi-provider.ps1 +++ b/samples/VideoProviders/MultiProviderPOC/demo-multi-provider.ps1 @@ -57,7 +57,7 @@ $providerMap = @{ } if ($Providers -ne "") { - $activeProviders = $Providers -split "," | ForEach-Object { $_.Trim().ToLower() } + $activeProviders = @($Providers -split "," | ForEach-Object { $_.Trim().ToLower() }) } else { $activeProviders = @() foreach ($p in $providerMap.Keys) { @@ -187,16 +187,15 @@ function Run-Veo { Write-Host "" Write-Host ("═" * 70) -ForegroundColor Green 
Write-Host " GOOGLE VEO" -ForegroundColor Green - Write-Host " Features: text-to-video, image-to-video, audio, negative prompt, seed, aspect ratio" -ForegroundColor Green + Write-Host " Features: text-to-video, image-to-video, native audio, negative prompt, seed, aspect ratio" -ForegroundColor Green Write-Host ("═" * 70) -ForegroundColor Green - # 1. Text-to-video with audio and negative prompt - $t2vPath = Join-Path $OutputDir "veo_01_text2video_audio.mp4" - Skip-OrRun "veo_t2v" "Veo: Text-to-video with audio" $t2vPath { - $out = Invoke-Tool "Veo: Text-to-video + audio + negative prompt" @( + # 1. Text-to-video with negative prompt + $t2vPath = Join-Path $OutputDir "veo_01_text2video.mp4" + Skip-OrRun "veo_t2v" "Veo: Text-to-video" $t2vPath { + $out = Invoke-Tool "Veo: Text-to-video + negative prompt" @( "generate", "--provider", "veo", "A serene mountain lake at dawn, birds singing, gentle water ripples.", - "--audio", "--negative-prompt", "people, buildings, cars, text, watermark", "--aspect-ratio", "16:9", "--duration", "8", @@ -223,11 +222,11 @@ function Run-Veo { if ($ReferenceImage -ne "" -and (Test-Path $ReferenceImage)) { $i2vPath = Join-Path $OutputDir "veo_03_image2video.mp4" Skip-OrRun "veo_i2v" "Veo: Image-to-video" $i2vPath { - $out = Invoke-Tool "Veo: Image-to-video with audio" @( + $out = Invoke-Tool "Veo: Image-to-video" @( "image-to-video", "--provider", "veo", "The scene in the image comes to life with natural movement and ambient sounds.", "--image", $ReferenceImage, - "--audio", "--duration", "4", + "--duration", "4", "--output", $i2vPath ) return (Extract-Id $out "OPERATION_ID") From c29b88f6ff68493d9cc356730abf00ebcd1c62fd Mon Sep 17 00:00:00 2001 From: "Eric St. 
John" Date: Fri, 3 Apr 2026 07:59:07 -0700 Subject: [PATCH 09/10] Fix up Veo provider --- .../GoogleVeoVideoGenerationOperation.cs | 47 ++++++++++++++++--- samples/VideoProviders/GoogleVeo/Program.cs | 8 ++-- .../MultiProviderPOC/Program.cs | 45 ++++++++++++++---- 3 files changed, 80 insertions(+), 20 deletions(-) diff --git a/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerationOperation.cs b/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerationOperation.cs index 92db61ebd65..fc80089baaf 100644 --- a/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerationOperation.cs +++ b/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerationOperation.cs @@ -68,18 +68,51 @@ public override async Task UpdateAsync(CancellationToken cancellationToken = def // Parse generated videos — predictLongRunning response format: // response.generateVideoResponse.generatedSamples[].video.uri if (root.TryGetProperty("response", out var resp) && - resp.TryGetProperty("generateVideoResponse", out var videoResponse) && - videoResponse.TryGetProperty("generatedSamples", out var samples)) + resp.TryGetProperty("generateVideoResponse", out var videoResponse)) { - _videoUris.Clear(); - foreach (var sample in samples.EnumerateArray()) + if (videoResponse.TryGetProperty("generatedSamples", out var samples)) { - if (sample.TryGetProperty("video", out var videoObj) && - videoObj.TryGetProperty("uri", out var uri)) + _videoUris.Clear(); + foreach (var sample in samples.EnumerateArray()) { - _videoUris.Add(uri.GetString()!); + if (sample.TryGetProperty("video", out var videoObj) && + videoObj.TryGetProperty("uri", out var uri)) + { + _videoUris.Add(uri.GetString()!); + } } } + + // Check if the response was filtered by safety (RAI) filters + if (_videoUris.Count == 0) + { + string? 
filterReason = null; + if (videoResponse.TryGetProperty("raiMediaFilteredCount", out var filteredCount) && filteredCount.GetInt32() > 0) + { + filterReason = $"Video was filtered by safety filters ({filteredCount.GetInt32()} filtered)."; + if (videoResponse.TryGetProperty("raiMediaFilteredReasons", out var reasons)) + { + filterReason += $" Reasons: {reasons}"; + } + } + else + { + filterReason = $"No videos in response. Full response: {resp}"; + } + + _status = "FAILED"; + _failureReason = filterReason; + } + } + else if (root.TryGetProperty("response", out var rawResp)) + { + _status = "FAILED"; + _failureReason = $"Unexpected response format: {rawResp}"; + } + else + { + _status = "FAILED"; + _failureReason = $"No response payload in completed operation. Raw: {root}"; } } else diff --git a/samples/VideoProviders/GoogleVeo/Program.cs b/samples/VideoProviders/GoogleVeo/Program.cs index ed86ffd892b..1815e081492 100644 --- a/samples/VideoProviders/GoogleVeo/Program.cs +++ b/samples/VideoProviders/GoogleVeo/Program.cs @@ -23,7 +23,7 @@ var resolutionOption = new Option("--resolution", () => "720p", "Resolution: 720p, 1080p, 4k."); var aspectRatioOption = new Option("--aspect-ratio", "Aspect ratio (e.g. 16:9, 9:16)."); var formatOption = new Option("--format", () => "data", "Response format: data or uri."); -var countOption = new Option("--count", () => 1, "Number of videos to generate."); +var countOption = new Option("--count", "Number of videos to generate."); var negativePromptOption = new Option("--negative-prompt", "What to avoid in the video."); var audioOption = new Option("--audio", () => false, "Generate audio (Veo 3+ only)."); var seedOption = new Option("--seed", "Seed for reproducibility."); @@ -56,7 +56,7 @@ int? duration = context.ParseResult.GetValueForOption(durationOption); string resolution = context.ParseResult.GetValueForOption(resolutionOption)!; string? 
aspectRatio = context.ParseResult.GetValueForOption(aspectRatioOption); - int count = context.ParseResult.GetValueForOption(countOption); + int? count = context.ParseResult.GetValueForOption(countOption); string? negativePrompt = context.ParseResult.GetValueForOption(negativePromptOption); bool audio = context.ParseResult.GetValueForOption(audioOption); int? seed = context.ParseResult.GetValueForOption(seedOption); @@ -134,9 +134,9 @@ byte[] refBytes = await File.ReadAllBytesAsync(refImg); refs.Add(new JsonObject { - ["referenceImage"] = new JsonObject + ["image"] = new JsonObject { - ["imageBytes"] = Convert.ToBase64String(refBytes), + ["bytesBase64Encoded"] = Convert.ToBase64String(refBytes), ["mimeType"] = "image/png", }, ["referenceType"] = refType.ToUpperInvariant(), diff --git a/samples/VideoProviders/MultiProviderPOC/Program.cs b/samples/VideoProviders/MultiProviderPOC/Program.cs index 080a362973d..3e1f8d0e0d1 100644 --- a/samples/VideoProviders/MultiProviderPOC/Program.cs +++ b/samples/VideoProviders/MultiProviderPOC/Program.cs @@ -485,16 +485,43 @@ public override async Task UpdateAsync(CancellationToken cancellationToken = def using var doc = JsonDocument.Parse(body); var root = doc.RootElement; _done = root.TryGetProperty("done", out var d) && d.GetBoolean(); - if (_done) _status = "COMPLETED"; if (root.TryGetProperty("error", out var err)) { _failureReason = err.ToString(); _status = "FAILED"; _done = true; } - _videoUris.Clear(); - // predictLongRunning response: response.generateVideoResponse.generatedSamples[].video.uri - if (root.TryGetProperty("response", out var response) && - response.TryGetProperty("generateVideoResponse", out var videoResponse) && - videoResponse.TryGetProperty("generatedSamples", out var samples)) - foreach (var s in samples.EnumerateArray()) - if (s.TryGetProperty("video", out var video) && video.TryGetProperty("uri", out var uri)) - _videoUris.Add(uri.GetString()!); + else if (_done) + { + _status = "COMPLETED"; + 
_videoUris.Clear(); + // predictLongRunning response: response.generateVideoResponse.generatedSamples[].video.uri + if (root.TryGetProperty("response", out var response) && + response.TryGetProperty("generateVideoResponse", out var videoResponse)) + { + if (videoResponse.TryGetProperty("generatedSamples", out var samples)) + foreach (var s in samples.EnumerateArray()) + if (s.TryGetProperty("video", out var video) && video.TryGetProperty("uri", out var uri)) + _videoUris.Add(uri.GetString()!); + + if (_videoUris.Count == 0) + { + string? reason = null; + if (videoResponse.TryGetProperty("raiMediaFilteredCount", out var fc) && fc.GetInt32() > 0) + { + reason = $"Video filtered by safety filters ({fc.GetInt32()} filtered)."; + if (videoResponse.TryGetProperty("raiMediaFilteredReasons", out var reasons)) reason += $" Reasons: {reasons}"; + } + else + { + reason = $"No videos in response. Full response: {response}"; + } + + _status = "FAILED"; + _failureReason = reason; + } + } + else + { + _status = "FAILED"; + _failureReason = $"Unexpected response format: {root}"; + } + } } public override async Task WaitForCompletionAsync(IProgress? progress = null, CancellationToken cancellationToken = default) From a7a06c9bc9becfa4e40fc55aa313037801505457 Mon Sep 17 00:00:00 2001 From: "Eric St. 
John" Date: Mon, 6 Apr 2026 15:11:08 -0700 Subject: [PATCH 10/10] Adjust how we represent different video operations --- samples/VideoGenerationPOC/Program.cs | 10 ++- .../GoogleVeo/GoogleVeoVideoGenerator.cs | 70 +++++++++++----- samples/VideoProviders/GoogleVeo/README.md | 12 +-- .../LumaAI/LumaVideoGenerator.cs | 61 +++++--------- samples/VideoProviders/LumaAI/README.md | 6 +- .../MultiProviderPOC/Program.cs | 59 ++++++------- samples/VideoProviders/PROVIDER_COMPARISON.md | 25 ++---- samples/VideoProviders/Runway/README.md | 10 +-- .../Runway/RunwayVideoGenerator.cs | 83 +++---------------- .../Video/VideoGenerationRequest.cs | 65 +++++++-------- .../Video/VideoGeneratorExtensions.cs | 33 ++++---- .../Video/VideoOperationKind.cs | 6 +- .../OpenAIClientExtensions.cs | 4 +- .../OpenAIVideoGenerator.cs | 43 +++------- .../OpenTelemetryVideoGenerator.cs | 19 ++++- .../Video/VideoGeneratorExtensionsTests.cs | 17 ++-- .../Video/VideoGeneratorTests.cs | 6 +- .../OpenAIVideoGeneratorTests.cs | 2 +- .../Video/LoggingVideoGeneratorTests.cs | 6 +- .../Video/OpenTelemetryVideoGeneratorTests.cs | 2 +- 20 files changed, 230 insertions(+), 309 deletions(-) diff --git a/samples/VideoGenerationPOC/Program.cs b/samples/VideoGenerationPOC/Program.cs index 87db81f185f..78afef12b63 100644 --- a/samples/VideoGenerationPOC/Program.cs +++ b/samples/VideoGenerationPOC/Program.cs @@ -52,15 +52,19 @@ using var generator = CreateGenerator(model); - List? originalMedia = await LoadInputFilesAsync(inputPaths); - if (originalMedia is null && inputPaths.Length > 0) + List? 
inputMedia = await LoadInputFilesAsync(inputPaths); + if (inputMedia is null && inputPaths.Length > 0) { context.ExitCode = 1; return; } var options = BuildOptions(duration, width, height, format, characterIds); - var request = new VideoGenerationRequest(prompt, originalMedia); + var request = new VideoGenerationRequest(prompt); + if (inputMedia is { Count: > 0 }) + { + request.StartFrame = inputMedia[0]; + } var operation = await generator.GenerateAsync(request, options); await CompleteAndSaveAsync(operation, options, outputPath); diff --git a/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerator.cs b/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerator.cs index 93adb3545b6..54cc293991c 100644 --- a/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerator.cs +++ b/samples/VideoProviders/GoogleVeo/GoogleVeoVideoGenerator.cs @@ -52,23 +52,47 @@ public async Task GenerateAsync( } // Image for image-to-video - if (request.OperationKind == VideoOperationKind.Create && request.OriginalMedia is not null) + if (request.OperationKind == VideoOperationKind.Create && request.StartFrame is not null) { - var image = GetFirstImageContent(request.OriginalMedia); + var image = GetImageNode(request.StartFrame); if (image is not null) { instance["image"] = image; } } - // Reference images (provider-specific via AdditionalProperties) - if (options?.AdditionalProperties?.TryGetValue("referenceImages", out object? 
refImgs) == true && refImgs is JsonArray refArray) + // Reference images (first-class property) + if (request.ReferenceImages is { Count: > 0 } refImages) { - instance["referenceImages"] = JsonNode.Parse(refArray.ToJsonString())!; + var refArray = new JsonArray(); + foreach (var refImg in refImages) + { + var imgNode = BuildImageNode(refImg); + if (imgNode is not null) + { + refArray.Add(new JsonObject { ["referenceImage"] = new JsonObject { ["image"] = imgNode } }); + } + } + + if (refArray.Count > 0) + { + instance["referenceImages"] = refArray; + } + } + else if (options?.AdditionalProperties?.TryGetValue("referenceImages", out object? refImgs) == true && refImgs is JsonArray refArrayLegacy) + { + instance["referenceImages"] = JsonNode.Parse(refArrayLegacy.ToJsonString())!; } // Last frame for first+last frame interpolation - if (options?.AdditionalProperties?.TryGetValue("lastFrameImage", out object? lastFrame) == true) + if (request.EndFrame is not null) + { + instance["lastFrame"] = new JsonObject + { + ["image"] = BuildImageNode(request.EndFrame), + }; + } + else if (options?.AdditionalProperties?.TryGetValue("lastFrameImage", out object? lastFrame) == true) { instance["lastFrame"] = new JsonObject { @@ -187,33 +211,35 @@ public async Task GenerateAsync( public void Dispose() => _httpClient.Dispose(); - private static JsonNode? GetFirstImageContent(IEnumerable media) + private static JsonNode? GetImageNode(AIContent content) { - foreach (var item in media) + if (content is DataContent dc && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? false) && dc.Data.Length > 0) { - if (item is DataContent dc && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? 
false) && dc.Data.Length > 0) + return new JsonObject { - return new JsonObject - { - ["bytesBase64Encoded"] = Convert.ToBase64String(dc.Data.ToArray()), - ["mimeType"] = dc.MediaType, - }; - } + ["bytesBase64Encoded"] = Convert.ToBase64String(dc.Data.ToArray()), + ["mimeType"] = dc.MediaType, + }; + } - if (item is UriContent uc && uc.Uri is not null) + if (content is UriContent uc && uc.Uri is not null) + { + return new JsonObject { - return new JsonObject - { - ["gcsUri"] = uc.Uri.ToString(), - }; - } + ["gcsUri"] = uc.Uri.ToString(), + }; } return null; } - private static JsonNode BuildImageNode(object imageData) + private static JsonNode? BuildImageNode(object imageData) { + if (imageData is AIContent aiContent) + { + return GetImageNode(aiContent); + } + if (imageData is string path && File.Exists(path)) { byte[] bytes = File.ReadAllBytes(path); diff --git a/samples/VideoProviders/GoogleVeo/README.md b/samples/VideoProviders/GoogleVeo/README.md index 32229f4f6a1..083211f4ab5 100644 --- a/samples/VideoProviders/GoogleVeo/README.md +++ b/samples/VideoProviders/GoogleVeo/README.md @@ -28,10 +28,10 @@ export GOOGLE_API_KEY="AIza..." 
| Operation | MEAI Mapping | Notes | |---|---|---| -| Text-to-video | `VideoOperationKind.Create`, no `OriginalMedia` | Prompt-only generation | -| Image-to-video | `VideoOperationKind.Create` + `OriginalMedia` (image) | Image as starting reference | -| First+last frame interpolation | `OriginalMedia` + `AdditionalProperties["lastFrameImage"]` | Generate video between two frames | -| Reference images (up to 3) | `AdditionalProperties["referenceImages"]` | Style/subject transfer with `reference_type` | +| Text-to-video | `VideoOperationKind.Create`, no `StartFrame` | Prompt-only generation | +| Image-to-video | `VideoOperationKind.Create` + `StartFrame` (image) | Image as starting reference | +| First+last frame interpolation | `StartFrame` + `EndFrame` | Generate video between two frames | +| Reference images (up to 3) | `ReferenceImages` | Style/subject transfer with `reference_type` | | Video extension | `VideoOperationKind.Extend` | Extend up to 20 times (7s each, 720p only) | | Multiple outputs | `VideoGenerationOptions.Count` | Generate 1-4 videos from one request | @@ -65,8 +65,8 @@ dotnet run -- generate "A sunset" --count 4 --output sunset.mp4 ## API Gaps / Limitations -- **Reference images with typed purpose**: Veo supports `referenceImages` with `referenceType` ("REFERENCE_TYPE_STYLE" or "REFERENCE_TYPE_SUBJECT"), allowing up to 3 images for style/subject transfer. MEAI's `OriginalMedia` doesn't distinguish between "input image for image-to-video" and "reference image for style transfer". -- **First/last frame interpolation**: Veo generates a video between two keyframe images. MEAI has no concept of a "last frame" — this requires `AdditionalProperties`. +- **Reference images with typed purpose**: Veo supports `referenceImages` with `referenceType` ("REFERENCE_TYPE_STYLE" or "REFERENCE_TYPE_SUBJECT"), allowing up to 3 images for style/subject transfer. 
MEAI's `ReferenceImages` collection maps well to this but doesn't include the `referenceType` metadata — provider-specific `AdditionalProperties` can be used for that. +- **First/last frame interpolation**: Veo generates a video between two keyframe images. MEAI's `StartFrame` and `EndFrame` properties map directly to this. - **Native audio generation**: Veo 3+ can generate synchronized audio with video. MEAI has no audio-related option. - **Negative prompts**: Veo supports `negativePrompt` to exclude unwanted elements. Not part of the core MEAI options. - **Resolution as named tier**: Veo uses `"720p"`, `"1080p"`, `"4k"` — not pixel dimensions. The `VideoSize` abstraction works but the mapping is lossy. diff --git a/samples/VideoProviders/LumaAI/LumaVideoGenerator.cs b/samples/VideoProviders/LumaAI/LumaVideoGenerator.cs index 380b34ff5e1..8fdc3876ec3 100644 --- a/samples/VideoProviders/LumaAI/LumaVideoGenerator.cs +++ b/samples/VideoProviders/LumaAI/LumaVideoGenerator.cs @@ -89,10 +89,15 @@ public async Task GenerateAsync( switch (request.OperationKind) { case VideoOperationKind.Create: - // Image-to-video: use original media as first frame (frame0) - if (request.OriginalMedia is not null) + // Image-to-video: use StartFrame as first frame (frame0) and EndFrame as last frame (frame1) + if (request.StartFrame is not null) { - await AddImageKeyframesAsync(keyframes, request.OriginalMedia, options); + AddImageKeyframe(keyframes, "frame0", request.StartFrame); + } + + if (request.EndFrame is not null) + { + AddImageKeyframe(keyframes, "frame1", request.EndFrame); } break; @@ -154,47 +159,25 @@ public async Task GenerateAsync( public void Dispose() => _httpClient.Dispose(); - private static async Task AddImageKeyframesAsync(JsonObject keyframes, IEnumerable media, VideoGenerationOptions? 
options) + private static void AddImageKeyframe(JsonObject keyframes, string frameKey, AIContent content) { - int index = 0; - foreach (var item in media) + if (content is UriContent uc && uc.Uri is not null) { - if (item is not DataContent dc) + keyframes[frameKey] = new JsonObject { - continue; - } - - string frameKey = index == 0 ? "frame0" : "frame1"; - - if (item is UriContent uc && uc.Uri is not null) - { - // If it's a URL-based image, Luma requires HTTPS URLs - keyframes[frameKey] = new JsonObject - { - ["type"] = "image", - ["url"] = uc.Uri.ToString(), - }; - } - else if (dc.Data.Length > 0) - { - // Luma only accepts HTTPS URLs for images, not data URIs. - // (Limitation: callers must upload images to a CDN first.) - string dataUri = dc.Uri ?? $"data:{dc.MediaType ?? "image/png"};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; - keyframes[frameKey] = new JsonObject - { - ["type"] = "image", - ["url"] = dataUri, - }; - } - - index++; - if (index >= 2) + ["type"] = "image", + ["url"] = uc.Uri.ToString(), + }; + } + else if (content is DataContent dc && dc.Data.Length > 0) + { + string dataUri = dc.Uri ?? $"data:{dc.MediaType ?? 
"image/png"};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; + keyframes[frameKey] = new JsonObject { - break; // Luma supports max 2 keyframes (frame0 + frame1) - } + ["type"] = "image", + ["url"] = dataUri, + }; } - - await Task.CompletedTask; } private static string MapResolution(Size size) diff --git a/samples/VideoProviders/LumaAI/README.md b/samples/VideoProviders/LumaAI/README.md index aefb3314aa4..4d530c0556b 100644 --- a/samples/VideoProviders/LumaAI/README.md +++ b/samples/VideoProviders/LumaAI/README.md @@ -26,9 +26,9 @@ export LUMA_API_KEY="luma-xxxx" | Operation | MEAI Mapping | Notes | |---|---|---| -| Text-to-video | `VideoOperationKind.Create`, no `OriginalMedia` | Basic prompt → video | -| Image-to-video (start frame) | `VideoOperationKind.Create` + `OriginalMedia` (1 image) | Image as first frame (`keyframes.frame0`) | -| Image-to-video (start+end frames) | `VideoOperationKind.Create` + `OriginalMedia` (2 images) | Two images as keyframes (`frame0`+`frame1`) for interpolation | +| Text-to-video | `VideoOperationKind.Create`, no `StartFrame` | Basic prompt → video | +| Image-to-video (start frame) | `VideoOperationKind.Create` + `StartFrame` (image) | Image as first frame (`keyframes.frame0`) | +| Image-to-video (start+end frames) | `VideoOperationKind.Create` + `StartFrame` + `EndFrame` | Two images as keyframes (`frame0`+`frame1`) for interpolation | | Extend video | `VideoOperationKind.Extend` + `SourceVideoId` | Extend using the generation ID of a completed video | | Reverse extend | `AdditionalProperties` | Extend backwards — requires provider-specific keyframe manipulation | | Video interpolation | `AdditionalProperties` | Interpolate between two generation IDs | diff --git a/samples/VideoProviders/MultiProviderPOC/Program.cs b/samples/VideoProviders/MultiProviderPOC/Program.cs index 3e1f8d0e0d1..34b7ac0de8b 100644 --- a/samples/VideoProviders/MultiProviderPOC/Program.cs +++ b/samples/VideoProviders/MultiProviderPOC/Program.cs @@ -399,21 
+399,15 @@ public async Task GenerateAsync( var instance = new JsonObject(); if (request.Prompt is not null) instance["prompt"] = request.Prompt; - if (request.OperationKind == VideoOperationKind.Create && request.OriginalMedia is not null) + if (request.OperationKind == VideoOperationKind.Create && request.StartFrame is not null) { - foreach (var item in request.OriginalMedia) + if (request.StartFrame is DataContent dc && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? false) && dc.Data.Length > 0) { - if (item is DataContent dc && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? false) && dc.Data.Length > 0) - { - instance["image"] = new JsonObject { ["bytesBase64Encoded"] = Convert.ToBase64String(dc.Data.ToArray()), ["mimeType"] = dc.MediaType }; - break; - } - - if (item is UriContent uc) - { - instance["image"] = new JsonObject { ["gcsUri"] = uc.Uri.ToString() }; - break; - } + instance["image"] = new JsonObject { ["bytesBase64Encoded"] = Convert.ToBase64String(dc.Data.ToArray()), ["mimeType"] = dc.MediaType }; + } + else if (request.StartFrame is UriContent uc) + { + instance["image"] = new JsonObject { ["gcsUri"] = uc.Uri.ToString() }; } } @@ -577,20 +571,20 @@ public async Task GenerateAsync( string endpoint; JsonObject body; - bool hasVideo = request.OriginalMedia?.Any(m => m is DataContent dc && dc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) == true) == true; - bool hasImage = request.OriginalMedia?.Any(m => m is DataContent dc && dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) == true) == true; + bool hasVideo = request.SourceVideo is DataContent svDc && svDc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) == true; + bool hasImage = request.StartFrame is DataContent sfDc && sfDc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) == true; if (request.OperationKind == VideoOperationKind.Edit && hasVideo) { endpoint = 
"/v1/video_to_video"; body = new JsonObject { ["model"] = "gen4_aleph", ["promptText"] = request.Prompt ?? "" }; - string? vidUri = GetMediaUri(request.OriginalMedia, "video/"); + string? vidUri = GetMediaUri(request.SourceVideo); if (vidUri is not null) body["videoUri"] = vidUri; } else if (hasImage) { endpoint = "/v1/image_to_video"; - string? imgUri = GetMediaUri(request.OriginalMedia, "image/"); + string? imgUri = GetMediaUri(request.StartFrame); string ratio = options?.AspectRatio is { } ar ? MapAspectRatio(ar) : "1280:720"; body = new JsonObject { ["model"] = model, ["promptText"] = request.Prompt ?? "", ["promptImage"] = imgUri ?? "", ["ratio"] = ratio }; } @@ -616,16 +610,12 @@ public async Task GenerateAsync( => serviceKey is null && serviceType.IsInstanceOfType(this) ? this : null; public void Dispose() => _httpClient.Dispose(); - private static string? GetMediaUri(IEnumerable? media, string prefix) + private static string? GetMediaUri(AIContent? content) { - if (media is null) return null; - foreach (var item in media) - { - if (item is UriContent uc) return uc.Uri.ToString(); - if (item is DataContent dc && dc.Data.Length > 0) - return dc.Uri ?? $"data:{dc.MediaType ?? "application/octet-stream"};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; - } - + if (content is null) return null; + if (content is UriContent uc) return uc.Uri.ToString(); + if (content is DataContent dc && dc.Data.Length > 0) + return dc.Uri ?? $"data:{dc.MediaType ?? 
"application/octet-stream"};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; return null; } @@ -715,16 +705,17 @@ public async Task GenerateAsync( if (options?.AspectRatio is { } ar) body["aspect_ratio"] = ar; var keyframes = new JsonObject(); - if (request.OperationKind == VideoOperationKind.Create && request.OriginalMedia is not null) + if (request.OperationKind == VideoOperationKind.Create && request.StartFrame is not null) { - int idx = 0; - foreach (var item in request.OriginalMedia) + if (request.StartFrame is UriContent uc) keyframes["frame0"] = new JsonObject { ["type"] = "image", ["url"] = uc.Uri.ToString() }; + else if (request.StartFrame is DataContent dc && dc.Data.Length > 0) + keyframes["frame0"] = new JsonObject { ["type"] = "image", ["url"] = dc.Uri ?? $"data:{dc.MediaType ?? "image/png"};base64,{Convert.ToBase64String(dc.Data.ToArray())}" }; + + if (request.EndFrame is not null) { - string key = idx == 0 ? "frame0" : "frame1"; - if (item is UriContent uc) keyframes[key] = new JsonObject { ["type"] = "image", ["url"] = uc.Uri.ToString() }; - else if (item is DataContent dc && dc.Data.Length > 0) - keyframes[key] = new JsonObject { ["type"] = "image", ["url"] = dc.Uri ?? $"data:{dc.MediaType ?? "image/png"};base64,{Convert.ToBase64String(dc.Data.ToArray())}" }; - if (++idx >= 2) break; + if (request.EndFrame is UriContent euc) keyframes["frame1"] = new JsonObject { ["type"] = "image", ["url"] = euc.Uri.ToString() }; + else if (request.EndFrame is DataContent edc && edc.Data.Length > 0) + keyframes["frame1"] = new JsonObject { ["type"] = "image", ["url"] = edc.Uri ?? $"data:{edc.MediaType ?? 
"image/png"};base64,{Convert.ToBase64String(edc.Data.ToArray())}" }; } } else if (request.OperationKind == VideoOperationKind.Extend && request.SourceVideoId is not null) diff --git a/samples/VideoProviders/PROVIDER_COMPARISON.md b/samples/VideoProviders/PROVIDER_COMPARISON.md index 7deb9668f7b..ea29409f73e 100644 --- a/samples/VideoProviders/PROVIDER_COMPARISON.md +++ b/samples/VideoProviders/PROVIDER_COMPARISON.md @@ -103,20 +103,9 @@ public string? NegativePrompt { get; set; } ### Gap 4: Reference Images with Purpose (MEDIUM PRIORITY) -**Problem**: Google Veo supports up to 3 reference images, each with a `referenceType` ("STYLE" or "SUBJECT"). Runway's video-to-video supports reference images for style transfer. The current `OriginalMedia` collection doesn't distinguish between "this is the source image for image-to-video" vs "this is a style reference". +**Problem**: Google Veo supports up to 3 reference images, each with a `referenceType` ("STYLE" or "SUBJECT"). Runway's video-to-video supports reference images for style transfer. -**Recommendation**: Consider a typed reference media collection, or a new property: - -```csharp -/// Reference images for guiding style, subject, or other attributes of the generation. -public IList? ReferenceMedia { get; set; } - -public class ReferenceMedia -{ - public AIContent Content { get; set; } - public string? ReferenceType { get; set; } // "style", "subject", etc. -} -``` +**Status**: ✅ ADDRESSED — `VideoGenerationRequest.ReferenceImages` (IList?) provides a first-class collection for reference images. The `referenceType` metadata can be provided via provider-specific `AdditionalProperties` on the individual `AIContent` items. 
**Providers**: Google Veo ✅ (3 refs, typed), Runway ✅ (1 ref for v2v) @@ -141,11 +130,9 @@ Alternatively, this could be modeled as part of `MediaType` (e.g., `"video/mp4; ### Gap 6: Keyframe / Interpolation (LOW PRIORITY) -**Problem**: Both Luma and Google Veo support first+last frame interpolation — providing a start and end image and generating the video in between. The current API only models "input media" without positional semantics. - -**Current workaround**: Send two images in `OriginalMedia` and the provider implementation knows first = frame0, second = frame1. +**Problem**: Both Luma and Google Veo support first+last frame interpolation — providing a start and end image and generating the video in between. -**Recommendation**: No immediate API change needed. The `OriginalMedia` collection with provider convention (first item = first frame, second = last frame) is workable. Could add `string? FramePosition` to a future `ReferenceMedia` type. +**Status**: ✅ ADDRESSED — `VideoGenerationRequest.StartFrame` and `VideoGenerationRequest.EndFrame` provide first-class properties for first/last frame interpolation. --- @@ -177,7 +164,7 @@ Alternatively, this could be modeled as part of `MediaType` (e.g., `"video/mp4; ### 1. Runway's Separate Endpoints -Runway uses three separate endpoints (`text_to_video`, `image_to_video`, `video_to_video`) rather than a single unified endpoint. The `IVideoGenerator.GenerateAsync` single-method approach requires the implementation to inspect `OriginalMedia` content types to determine which endpoint to call. This works but adds complexity. +Runway uses three separate endpoints (`text_to_video`, `image_to_video`, `video_to_video`) rather than a single unified endpoint. The `IVideoGenerator.GenerateAsync` single-method approach requires the implementation to inspect `StartFrame`/`SourceVideo` properties to determine which endpoint to call. This works cleanly with the new typed properties. ### 2. 
Luma's URL-Only Image Input @@ -222,7 +209,7 @@ All providers handle duration differently: - **`VideoGenerationOperation` pattern**: The submit → poll → download lifecycle maps perfectly to all four providers. - **`VideoOperationKind` enum**: Create/Edit/Extend covers the core operations well. -- **`OriginalMedia` collection**: Handles image-to-video input for all providers. +- **`StartFrame`/`EndFrame`/`ReferenceImages` properties**: Handles image-to-video, interpolation, and reference images with clear semantics for all providers. - **`AdditionalProperties` escape hatch**: Provider-specific features (concepts, camera motion, content moderation) flow through cleanly. - **`GetService()` pattern**: Enables provider-specific extensions (like OpenAI's `UploadVideoCharacterAsync`) without polluting the interface. - **`VideoGenerationResponseFormat`**: Uri vs Data choice is useful for all providers. diff --git a/samples/VideoProviders/Runway/README.md b/samples/VideoProviders/Runway/README.md index 519e603f734..4078b511fec 100644 --- a/samples/VideoProviders/Runway/README.md +++ b/samples/VideoProviders/Runway/README.md @@ -29,9 +29,9 @@ export RUNWAY_API_KEY="rw_xxxx" | Operation | MEAI Mapping | Endpoint | |---|---|---| -| Text-to-video | `VideoOperationKind.Create`, no `OriginalMedia` | `POST /v1/text_to_video` | -| Image-to-video | `VideoOperationKind.Create` + `OriginalMedia` (image) | `POST /v1/image_to_video` | -| Video-to-video | `VideoOperationKind.Edit` + `OriginalMedia` (video) | `POST /v1/video_to_video` | +| Text-to-video | `VideoOperationKind.Create`, no `StartFrame` | `POST /v1/text_to_video` | +| Image-to-video | `VideoOperationKind.Create` + `StartFrame` (image) | `POST /v1/image_to_video` | +| Video-to-video | `VideoOperationKind.Edit` + `SourceVideo` (video) | `POST /v1/video_to_video` | ## Usage @@ -56,8 +56,8 @@ dotnet run -- text-to-video "A sunset over mountains" --seed 42 --output sunset. 
- **Ratio vs Size**: Runway uses fixed ratio strings (`"1280:720"`, `"720:1280"`, etc.) rather than arbitrary pixel dimensions. The `VideoSize` → ratio mapping loses information. - **Character performance** (`act_two`): Runway has a unique `character_performance` endpoint for driving a character with a reference video. This is fundamentally different from OpenAI's character system and has no MEAI equivalent. - **Seed**: Available via `AdditionalProperties` — consider promoting to a first-class option. -- **Image position**: Runway's `image_to_video` accepts an array of `PromptImages` with `position` (currently only `"first"`). MEAI only models a single image via `OriginalMedia` without position metadata. +- **Image position**: Runway's `image_to_video` accepts an array of `PromptImages` with `position` (currently only `"first"`). MEAI models this via `StartFrame` for the first frame. - **Duration as integer**: Runway passes duration as an integer (2-10), while OpenAI requires a string enum. The MEAI `TimeSpan Duration` maps cleanly to both. -- **Video-to-video references**: `gen4_aleph` supports `references` (array of image references for style). This is modeled via `AdditionalProperties` but could benefit from a first-class reference images concept. +- **Video-to-video references**: `gen4_aleph` supports `references` (array of image references for style). These could be modeled via `ReferenceImages` on the request. - **Content moderation**: Runway has `contentModeration.publicFigureThreshold` — provider-specific safety control. - **No resolution control for v2v**: For video-to-video, the output resolution is determined by the input video. 
diff --git a/samples/VideoProviders/Runway/RunwayVideoGenerator.cs b/samples/VideoProviders/Runway/RunwayVideoGenerator.cs index 4cb2cb72aef..b0cd93f4a0e 100644 --- a/samples/VideoProviders/Runway/RunwayVideoGenerator.cs +++ b/samples/VideoProviders/Runway/RunwayVideoGenerator.cs @@ -49,13 +49,13 @@ public async Task GenerateAsync( JsonObject body; // Determine which endpoint to use based on operation kind and media - if (request.OperationKind == VideoOperationKind.Edit && HasVideoMedia(request.OriginalMedia)) + if (request.OperationKind == VideoOperationKind.Edit && request.SourceVideo is not null) { // Video-to-video (gen4_aleph only) endpoint = "/v1/video_to_video"; body = BuildVideoToVideoBody(request, model, options); } - else if (HasImageMedia(request.OriginalMedia)) + else if (request.StartFrame is not null) { // Image-to-video endpoint = "/v1/image_to_video"; @@ -113,7 +113,7 @@ private static JsonObject BuildTextToVideoBody(VideoGenerationRequest request, s private static JsonObject BuildImageToVideoBody(VideoGenerationRequest request, string model, VideoGenerationOptions? options) { - string? imageUri = GetFirstImageUri(request.OriginalMedia); + string? imageUri = GetContentUri(request.StartFrame); var body = new JsonObject { @@ -134,7 +134,7 @@ private static JsonObject BuildImageToVideoBody(VideoGenerationRequest request, private static JsonObject BuildVideoToVideoBody(VideoGenerationRequest request, string model, VideoGenerationOptions? options) { - string? videoUri = GetFirstVideoUri(request.OriginalMedia); + string? videoUri = GetContentUri(request.SourceVideo); var body = new JsonObject { @@ -166,89 +166,26 @@ private static void AddSeed(JsonObject body, VideoGenerationOptions? options) } } - private static string? GetFirstImageUri(IEnumerable? media) + private static string? GetContentUri(AIContent? 
content) { - if (media is null) + if (content is null) { return null; } - foreach (var item in media) + if (content is UriContent uc && uc.Uri is not null) { - if (item is UriContent uc && uc.Uri is not null && (uc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? true)) - { - return uc.Uri.ToString(); - } - - if (item is DataContent dc && dc.Data.Length > 0 && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? true)) - { - // Runway accepts data URIs for images - return dc.Uri ?? $"data:{dc.MediaType ?? "image/png"};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; - } + return uc.Uri.ToString(); } - return null; - } - - private static string? GetFirstVideoUri(IEnumerable? media) - { - if (media is null) + if (content is DataContent dc && dc.Data.Length > 0) { - return null; - } - - foreach (var item in media) - { - if (item is UriContent uc && uc.Uri is not null && (uc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) ?? true)) - { - return uc.Uri.ToString(); - } - - if (item is DataContent dc && dc.Data.Length > 0 && (dc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) ?? false)) - { - return dc.Uri ?? $"data:{dc.MediaType};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; - } + return dc.Uri ?? $"data:{dc.MediaType ?? "application/octet-stream"};base64,{Convert.ToBase64String(dc.Data.ToArray())}"; } return null; } - private static bool HasImageMedia(IEnumerable? media) - { - if (media is null) - { - return false; - } - - foreach (var item in media) - { - if (item is DataContent dc && (dc.MediaType?.StartsWith("image/", StringComparison.OrdinalIgnoreCase) ?? false)) - { - return true; - } - } - - return false; - } - - private static bool HasVideoMedia(IEnumerable? media) - { - if (media is null) - { - return false; - } - - foreach (var item in media) - { - if (item is DataContent dc && (dc.MediaType?.StartsWith("video/", StringComparison.OrdinalIgnoreCase) ?? 
false)) - { - return true; - } - } - - return false; - } - private static string MapRatio(Size? size) { if (size is null) diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs index 53bbcec1b5b..2e330fe9308 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGenerationRequest.cs @@ -23,15 +23,6 @@ public VideoGenerationRequest(string prompt) Prompt = prompt; } - /// Initializes a new instance of the class. - /// The prompt to guide the video generation. - /// The original media (images or videos) to base edits on. - public VideoGenerationRequest(string prompt, IEnumerable? originalMedia) - { - Prompt = prompt; - OriginalMedia = originalMedia; - } - /// Gets or sets the prompt to guide the video generation. public string? Prompt { get; set; } @@ -42,7 +33,7 @@ public VideoGenerationRequest(string prompt, IEnumerable? originalMed /// /// Defaults to . Set to or /// when working with an existing video referenced by - /// or uploaded via . + /// or . /// public VideoOperationKind OperationKind { get; set; } @@ -54,30 +45,36 @@ public VideoGenerationRequest(string prompt, IEnumerable? originalMed /// public string? SourceVideoId { get; set; } - /// - /// Gets or sets the original media (images or videos) to use as input for the video generation. - /// + /// Gets or sets the starting frame image for image-to-video generation. + /// + /// When provided with , the provider uses this image as the + /// initial frame from which the video is generated. Typically an image content such as + /// or with an image media type. + /// + public AIContent? StartFrame { get; set; } + + /// Gets or sets the ending frame image for video interpolation. 
+ /// + /// When provided alongside , providers that support frame interpolation + /// generate a video that transitions from to this ending frame. + /// Not all providers support this feature. + /// + public AIContent? EndFrame { get; set; } + + /// Gets or sets reference images for style or subject guidance. + /// + /// Reference images influence the visual style or subject matter of the generated video without + /// being used as literal frames. For example, a provider may use these for style transfer or + /// subject consistency. Not all providers support this feature. + /// + public IList? ReferenceImages { get; set; } + + /// Gets or sets the source video content for editing. /// - /// - /// The interpretation of this property depends on the content type of the media, the , - /// and the capabilities of the underlying provider. Common behaviors include: - /// - /// - /// - /// Image content (e.g., image/png, image/jpeg): Used as a reference image to guide new video - /// generation. The provider creates a video inspired by or based on the image. Supported by most providers. - /// - /// - /// Video content (e.g., video/mp4): Used as a source video for editing when - /// is . The provider modifies the - /// existing video according to the . - /// - /// - /// - /// If this property is or empty, the request is treated as a text-to-video generation - /// using only the . To edit or extend a previously generated video by ID rather than by - /// uploading media, set and the appropriate . - /// + /// Used when is and the source + /// video is provided as content rather than by ID. Typically a or + /// with a video media type. To reference a previously generated video + /// by its ID, use instead. /// - public IEnumerable? OriginalMedia { get; set; } + public AIContent? 
SourceVideo { get; set; } } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs index e29fb8c3eb6..89dabcdff5a 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoGeneratorExtensions.cs @@ -2,7 +2,6 @@ // The .NET Foundation licenses this file to you under the MIT license. using System; -using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Net.Mime; using System.Threading; @@ -106,28 +105,28 @@ public static Task GenerateVideoAsync( } /// - /// Submits an edit request for original media using the specified prompt. + /// Submits an edit request for existing video content using the specified prompt. /// /// The video generator. - /// The original media (images or videos) to use as input. - /// The prompt to guide the video generation or editing. + /// The source video content to edit. + /// The prompt to guide the video editing. /// The video generation options to configure the request. /// The to monitor for cancellation requests. The default is . - /// , , or is . + /// , , or is . /// A representing the submitted video generation job. public static Task EditVideoAsync( this IVideoGenerator generator, - IEnumerable originalMedia, + AIContent sourceVideo, string prompt, VideoGenerationOptions? 
options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(generator); - _ = Throw.IfNull(originalMedia); + _ = Throw.IfNull(sourceVideo); _ = Throw.IfNull(prompt); return generator.GenerateAsync( - new VideoGenerationRequest { Prompt = prompt, OriginalMedia = originalMedia, OperationKind = VideoOperationKind.Edit }, + new VideoGenerationRequest { Prompt = prompt, SourceVideo = sourceVideo, OperationKind = VideoOperationKind.Edit }, options, cancellationToken); } @@ -135,25 +134,25 @@ public static Task EditVideoAsync( /// Submits an edit request for a single video using the specified prompt. /// /// The video generator. - /// The single video to use as input. + /// The single video to use as input. /// The prompt to guide the video editing. /// The video generation options to configure the request. /// The to monitor for cancellation requests. The default is . - /// , , or is . + /// , , or is . /// A representing the submitted video generation job. public static Task EditVideoAsync( this IVideoGenerator generator, - DataContent originalVideo, + DataContent sourceVideo, string prompt, VideoGenerationOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(generator); - _ = Throw.IfNull(originalVideo); + _ = Throw.IfNull(sourceVideo); _ = Throw.IfNull(prompt); return generator.GenerateAsync( - new VideoGenerationRequest { Prompt = prompt, OriginalMedia = [originalVideo], OperationKind = VideoOperationKind.Edit }, + new VideoGenerationRequest { Prompt = prompt, SourceVideo = sourceVideo, OperationKind = VideoOperationKind.Edit }, options, cancellationToken); } @@ -161,7 +160,7 @@ public static Task EditVideoAsync( /// Submits an edit request for video data provided as a byte array. /// /// The video generator. - /// The byte array containing the video data to use as input. + /// The byte array containing the video data to use as input. /// The filename for the video data. 
/// The prompt to guide the video generation. /// The video generation options to configure the request. @@ -172,7 +171,7 @@ public static Task EditVideoAsync( /// A representing the submitted video generation job. public static Task EditVideoAsync( this IVideoGenerator generator, - ReadOnlyMemory originalVideoData, + ReadOnlyMemory sourceVideoData, string fileName, string prompt, VideoGenerationOptions? options = null, @@ -183,10 +182,10 @@ public static Task EditVideoAsync( _ = Throw.IfNull(prompt); string mediaType = GetMediaTypeFromFileName(fileName); - var dataContent = new DataContent(originalVideoData, mediaType) { Name = fileName }; + var dataContent = new DataContent(sourceVideoData, mediaType) { Name = fileName }; return generator.GenerateAsync( - new VideoGenerationRequest { Prompt = prompt, OriginalMedia = [dataContent], OperationKind = VideoOperationKind.Edit }, + new VideoGenerationRequest { Prompt = prompt, SourceVideo = dataContent, OperationKind = VideoOperationKind.Edit }, options, cancellationToken); } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoOperationKind.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoOperationKind.cs index 8ff0dab943e..78dcda46f39 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoOperationKind.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Video/VideoOperationKind.cs @@ -13,14 +13,14 @@ namespace Microsoft.Extensions.AI; public enum VideoOperationKind { /// - /// Create a new video from a text prompt, optionally guided by a reference image - /// supplied via . + /// Create a new video from a text prompt, optionally guided by a starting frame image + /// supplied via . /// Create, /// /// Edit an existing video identified by - /// or uploaded via . + /// or provided via . 
/// Edit, diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs index e9322acfc25..7dcc48168f3 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs @@ -206,7 +206,7 @@ public static IImageGenerator AsIImageGenerator(this ImageClient imageClient) => /// /// /// Image-to-video (): When - /// contains image content, uses it as an + /// contains image content, uses it as an /// input_reference via POST /videos. /// /// @@ -216,7 +216,7 @@ public static IImageGenerator AsIImageGenerator(this ImageClient imageClient) => /// /// /// Edit by upload (): When - /// contains video content and no + /// contains video content and no /// is set, uploads the video for editing. /// /// diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs index d29a1053d05..9a5b6568eaf 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIVideoGenerator.cs @@ -32,12 +32,12 @@ namespace Microsoft.Extensions.AI; /// /// /// Text-to-video (): When -/// is , +/// is , /// creates a new video from the text prompt via POST /videos. /// /// /// Image-to-video (): When -/// contains image content +/// contains image content /// (e.g., image/png), uses the image as an input_reference to guide /// new video creation via POST /videos. A sends the /// image URL in JSON; a uploads the image bytes via @@ -50,7 +50,7 @@ namespace Microsoft.Extensions.AI; /// /// /// Edit by upload (): When -/// contains video content +/// contains video content /// (e.g., video/mp4) and no /// is set, uploads the video for editing via POST /videos/edits with multipart/form-data. 
/// @@ -104,45 +104,28 @@ public async Task GenerateAsync( string modelId = options?.ModelId ?? _defaultModelId ?? "sora-2"; - // Determine OriginalMedia type based on the operation kind + // Determine input media based on the operation kind DataContent? videoEditContent = null; DataContent? imageReferenceData = null; UriContent? imageReferenceUri = null; if (request.OperationKind == VideoOperationKind.Create && - request.OriginalMedia is { } createMedia) + request.StartFrame is { } startFrame) { - foreach (AIContent media in createMedia) + if (startFrame is DataContent dc && dc.Data.Length > 0 && IsImageMediaType(dc.MediaType)) { - if (media is DataContent dc && dc.Data.Length > 0) - { - if (IsImageMediaType(dc.MediaType)) - { - imageReferenceData = dc; - } - - break; - } - - if (media is UriContent uc && IsImageMediaType(uc.MediaType)) - { - imageReferenceUri = uc; - break; - } + imageReferenceData = dc; + } + else if (startFrame is UriContent uc && IsImageMediaType(uc.MediaType)) + { + imageReferenceUri = uc; } } else if (request.OperationKind == VideoOperationKind.Edit && request.SourceVideoId is null && - request.OriginalMedia is { } editMedia) + request.SourceVideo is DataContent editDc && editDc.Data.Length > 0 && IsVideoMediaType(editDc.MediaType)) { - foreach (AIContent media in editMedia) - { - if (media is DataContent dc && dc.Data.Length > 0 && IsVideoMediaType(dc.MediaType)) - { - videoEditContent = dc; - break; - } - } + videoEditContent = editDc; } // Route to the appropriate endpoint and submit the video generation job diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs index 72e673d8e12..ba0fa330f7d 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryVideoGenerator.cs @@ -200,9 +200,24 @@ public async 
override Task GenerateAsync( content.Add(new TextContent(request.Prompt)); } - if (request.OriginalMedia is not null) + if (request.StartFrame is not null) { - content.AddRange(request.OriginalMedia); + content.Add(request.StartFrame); + } + + if (request.EndFrame is not null) + { + content.Add(request.EndFrame); + } + + if (request.ReferenceImages is not null) + { + content.AddRange(request.ReferenceImages); + } + + if (request.SourceVideo is not null) + { + content.Add(request.SourceVideo); } _ = activity.AddTag( diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs index 54f00720575..759084dc3d5 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorExtensionsTests.cs @@ -2,7 +2,6 @@ // The .NET Foundation licenses this file to you under the MIT license. 
using System; -using System.Collections.Generic; using System.Threading.Tasks; using Xunit; @@ -83,22 +82,22 @@ public async Task GenerateVideosAsync_CallsGenerateAsync() Assert.NotNull(capturedRequest); Assert.Equal("A cat video", capturedRequest!.Prompt); - Assert.Null(capturedRequest.OriginalMedia); + Assert.Null(capturedRequest.StartFrame); } [Fact] public async Task EditVideosAsync_NullGenerator_Throws() { await Assert.ThrowsAsync("generator", () => - ((IVideoGenerator)null!).EditVideoAsync(Array.Empty(), "prompt")); + ((IVideoGenerator)null!).EditVideoAsync(new DataContent("dGVzdA=="u8.ToArray(), "video/mp4"), "prompt")); } [Fact] - public async Task EditVideosAsync_NullOriginalMedia_Throws() + public async Task EditVideosAsync_NullSourceVideo_Throws() { using var generator = new TestVideoGenerator(); - await Assert.ThrowsAsync("originalMedia", () => - generator.EditVideoAsync((IEnumerable)null!, "prompt")); + await Assert.ThrowsAsync("sourceVideo", () => + generator.EditVideoAsync((AIContent)null!, "prompt")); } [Fact] @@ -106,7 +105,7 @@ public async Task EditVideosAsync_NullPrompt_Throws() { using var generator = new TestVideoGenerator(); await Assert.ThrowsAsync("prompt", () => - generator.EditVideoAsync(Array.Empty(), null!)); + generator.EditVideoAsync(new DataContent("dGVzdA=="u8.ToArray(), "video/mp4"), null!)); } [Fact] @@ -127,7 +126,7 @@ public async Task EditVideoAsync_DataContent_CallsGenerateAsync() Assert.NotNull(capturedRequest); Assert.Equal("Make it faster", capturedRequest!.Prompt); - Assert.NotNull(capturedRequest.OriginalMedia); + Assert.NotNull(capturedRequest.SourceVideo); } [Fact] @@ -147,7 +146,7 @@ public async Task EditVideoAsync_ByteArray_CallsGenerateAsync() Assert.NotNull(capturedRequest); Assert.Equal("Add effects", capturedRequest!.Prompt); - Assert.NotNull(capturedRequest.OriginalMedia); + Assert.NotNull(capturedRequest.SourceVideo); } [Fact] diff --git 
a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs index 6b829671e5f..84531d2a321 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Video/VideoGeneratorTests.cs @@ -111,8 +111,8 @@ public async Task GenerateVideosAsync_WithOptions_PassesThroughCorrectly() [Fact] public async Task GenerateVideosAsync_WithEditRequest_PassesThroughCorrectly() { - var originalVideos = new AIContent[] { new DataContent("dGVzdA=="u8.ToArray(), "video/mp4") }; - var request = new VideoGenerationRequest("Edit this", originalVideos); + var sourceVideo = new DataContent("dGVzdA=="u8.ToArray(), "video/mp4"); + var request = new VideoGenerationRequest("Edit this") { SourceVideo = sourceVideo }; VideoGenerationRequest? capturedRequest = null; @@ -129,6 +129,6 @@ public async Task GenerateVideosAsync_WithEditRequest_PassesThroughCorrectly() Assert.NotNull(capturedRequest); Assert.Equal("Edit this", capturedRequest!.Prompt); - Assert.NotNull(capturedRequest.OriginalMedia); + Assert.NotNull(capturedRequest.SourceVideo); } } diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs index 84598d2613b..773dee0177b 100644 --- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIVideoGeneratorTests.cs @@ -125,7 +125,7 @@ await generator.GenerateAsync( new VideoGenerationRequest { Prompt = "animate this image", - OriginalMedia = [new DataContent(imageBytes, "image/png")], + StartFrame = new DataContent(imageBytes, "image/png"), }); // Should be JSON, not multipart diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs 
b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs index 3126179019f..96c00c097a0 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/LoggingVideoGeneratorTests.cs @@ -95,7 +95,7 @@ await generator.GenerateAsync( [InlineData(LogLevel.Trace)] [InlineData(LogLevel.Debug)] [InlineData(LogLevel.Information)] - public async Task GenerateVideosAsync_WithOriginalMedia_LogsInvocationAndCompletion(LogLevel level) + public async Task GenerateVideosAsync_WithSourceVideo_LogsInvocationAndCompletion(LogLevel level) { var collector = new FakeLogCollector(); using ILoggerFactory loggerFactory = LoggerFactory.Create(b => b.AddProvider(new FakeLoggerProvider(collector)).SetMinimumLevel(level)); @@ -113,9 +113,9 @@ public async Task GenerateVideosAsync_WithOriginalMedia_LogsInvocationAndComplet .UseLogging(loggerFactory) .Build(); - AIContent[] originalMedia = [new DataContent((byte[])[1, 2, 3, 4], "video/mp4")]; + AIContent sourceVideo = new DataContent((byte[])[1, 2, 3, 4], "video/mp4"); await generator.GenerateAsync( - new VideoGenerationRequest("Make it more colorful", originalMedia), + new VideoGenerationRequest("Make it more colorful") { SourceVideo = sourceVideo }, new VideoGenerationOptions { ModelId = "sora" }); var logs = collector.GetSnapshot(); diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs index 8c176a0c3bf..0ee0ea1976e 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Video/OpenTelemetryVideoGeneratorTests.cs @@ -67,7 +67,7 @@ public async Task ExpectedInformationLogged_Async(bool enableSensitiveData) VideoGenerationRequest request = new() { Prompt = "This is the input prompt.", - OriginalMedia = [new 
UriContent("http://example/input.mp4", "video/mp4")], + SourceVideo = new UriContent("http://example/input.mp4", "video/mp4"), }; VideoGenerationOptions options = new()