From 0cfbea411d13cc1b9905ee56aa701c69c6d99db4 Mon Sep 17 00:00:00 2001
From: sa_ddam213
Date: Mon, 11 Dec 2023 08:37:44 +1300
Subject: [PATCH] Add LatentConsistencyXL pipeline

---
 .../LatentConsistencyXL/ImageDiffuser.cs      |  84 +++++++
 .../InpaintLegacyDiffuser.cs                  | 230 ++++++++++++++++++
 .../LatentConsistencyXLDiffuser.cs            |  79 ++++++
 .../LatentConsistencyXL/TextDiffuser.cs       |  56 +++++
 .../Enums/DiffuserPipelineType.cs             |   3 +-
 OnnxStack.StableDiffusion/Extensions.cs       |  45 ++--
 .../Pipelines/LatentConsistencyXLPipeline.cs  |  56 +++++
 OnnxStack.StableDiffusion/Registration.cs     |   6 +
 .../LatentConsistency/LCMScheduler.cs         |   3 +
 .../UserControls/SchedulerControl.xaml.cs     |  15 +-
 OnnxStack.UI/Views/ModelView.xaml.cs          |  14 +-
 11 files changed, 562 insertions(+), 29 deletions(-)
 create mode 100644 OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/ImageDiffuser.cs
 create mode 100644 OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/InpaintLegacyDiffuser.cs
 create mode 100644 OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/LatentConsistencyXLDiffuser.cs
 create mode 100644 OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/TextDiffuser.cs
 create mode 100644 OnnxStack.StableDiffusion/Pipelines/LatentConsistencyXLPipeline.cs

diff --git a/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/ImageDiffuser.cs b/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/ImageDiffuser.cs
new file mode 100644
index 00000000..61a9e417
--- /dev/null
+++ b/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/ImageDiffuser.cs
@@ -0,0 +1,84 @@
+using Microsoft.Extensions.Logging;
+using Microsoft.ML.OnnxRuntime.Tensors;
+using OnnxStack.Core;
+using OnnxStack.Core.Config;
+using OnnxStack.Core.Model;
+using OnnxStack.Core.Services;
+using OnnxStack.StableDiffusion.Common;
+using OnnxStack.StableDiffusion.Config;
+using OnnxStack.StableDiffusion.Enums;
+using OnnxStack.StableDiffusion.Helpers;
+using SixLabors.ImageSharp;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace OnnxStack.StableDiffusion.Diffusers.LatentConsistencyXL
+{
+    public sealed class ImageDiffuser : LatentConsistencyXLDiffuser
+    {
+        ///
+        /// Initializes a new instance of the class.
+        ///
+        /// The configuration.
+        /// The onnx model service.
+        public ImageDiffuser(IOnnxModelService onnxModelService, IPromptService promptService, ILogger logger)
+            : base(onnxModelService, promptService, logger)
+        {
+        }
+
+
+        ///
+        /// Gets the type of the diffuser.
+        ///
+        public override DiffuserType DiffuserType => DiffuserType.ImageToImage;
+
+
+        ///
+        /// Gets the timesteps.
+        ///
+        /// The prompt.
+        /// The options.
+        /// The scheduler.
+        ///
+        protected override IReadOnlyList<int> GetTimesteps(SchedulerOptions options, IScheduler scheduler)
+        {
+            // Image2Image we narrow the timestep range by the Strength
+            var inittimestep = Math.Min((int)(options.InferenceSteps * options.Strength), options.InferenceSteps);
+            var start = Math.Max(options.InferenceSteps - inittimestep, 0);
+            return scheduler.Timesteps.Skip(start).ToList();
+        }
+
+
+        ///
+        /// Prepares the latents for inference.
+        ///
+        /// The prompt.
+        /// The options.
+        /// The scheduler.
+        ///
+        protected override async Task<DenseTensor<float>> PrepareLatentsAsync(StableDiffusionModelSet model, PromptOptions prompt, SchedulerOptions options, IScheduler scheduler, IReadOnlyList<int> timesteps)
+        {
+            var imageTensor = prompt.InputImage.ToDenseTensor(new[] { 1, 3, options.Height, options.Width });
+
+            //TODO: Model Config, Channels
+            var outputDimension = options.GetScaledDimension();
+            var metadata = _onnxModelService.GetModelMetadata(model, OnnxModelType.VaeEncoder);
+            using (var inferenceParameters = new OnnxInferenceParameters(metadata))
+            {
+                inferenceParameters.AddInputTensor(imageTensor);
+                inferenceParameters.AddOutputBuffer(outputDimension);
+
+                var results = await _onnxModelService.RunInferenceAsync(model, OnnxModelType.VaeEncoder, inferenceParameters);
+                using (var result = results.First())
+                {
+                    var outputResult = result.ToDenseTensor();
+                    var scaledSample = outputResult.MultiplyBy(model.ScaleFactor);
+                    return scheduler.AddNoise(scaledSample, scheduler.CreateRandomSample(scaledSample.Dimensions), timesteps);
+                }
+            }
+        }
+
+    }
+}
diff --git a/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/InpaintLegacyDiffuser.cs b/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/InpaintLegacyDiffuser.cs
new file mode 100644
index 00000000..4a27c9da
--- /dev/null
+++ b/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/InpaintLegacyDiffuser.cs
@@ -0,0 +1,230 @@
+using Microsoft.Extensions.Logging;
+using Microsoft.ML.OnnxRuntime.Tensors;
+using OnnxStack.Core;
+using OnnxStack.Core.Config;
+using OnnxStack.Core.Model;
+using OnnxStack.Core.Services;
+using OnnxStack.StableDiffusion.Common;
+using OnnxStack.StableDiffusion.Config;
+using OnnxStack.StableDiffusion.Enums;
+using OnnxStack.StableDiffusion.Helpers;
+using SixLabors.ImageSharp;
+using SixLabors.ImageSharp.Processing;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace OnnxStack.StableDiffusion.Diffusers.LatentConsistencyXL
+{
+    public sealed class InpaintLegacyDiffuser : LatentConsistencyXLDiffuser
+    {
+        ///
+        /// Initializes a new instance of the class.
+        ///
+        /// The configuration.
+        /// The onnx model service.
+        public InpaintLegacyDiffuser(IOnnxModelService onnxModelService, IPromptService promptService, ILogger logger)
+            : base(onnxModelService, promptService, logger)
+        {
+        }
+
+
+        ///
+        /// Gets the type of the diffuser.
+        ///
+        public override DiffuserType DiffuserType => DiffuserType.ImageInpaintLegacy;
+
+
+        ///
+        /// Runs the scheduler steps.
+        ///
+        /// The model options.
+        /// The prompt options.
+        /// The scheduler options.
+        /// The prompt embeddings.
+        /// if set to true [perform guidance].
+        /// The progress callback.
+        /// The cancellation token.
+        ///
+        protected override async Task<DenseTensor<float>> SchedulerStepAsync(StableDiffusionModelSet modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, PromptEmbeddingsResult promptEmbeddings, bool performGuidance, Action progressCallback = null, CancellationToken cancellationToken = default)
+        {
+            using (var scheduler = GetScheduler(schedulerOptions))
+            {
+                // Get timesteps
+                var timesteps = GetTimesteps(schedulerOptions, scheduler);
+
+                // Create latent sample
+                var latentsOriginal = await PrepareLatentsAsync(modelOptions, promptOptions, schedulerOptions, scheduler, timesteps);
+
+                // Create mask sample
+                var maskImage = PrepareMask(modelOptions, promptOptions, schedulerOptions);
+
+                // Generate some noise
+                var noise = scheduler.CreateRandomSample(latentsOriginal.Dimensions);
+
+                // Add noise to original latent
+                var latents = scheduler.AddNoise(latentsOriginal, noise, timesteps);
+
+                // Get Model metadata
+                var metadata = _onnxModelService.GetModelMetadata(modelOptions, OnnxModelType.Unet);
+
+                // Get Time ids
+                var addTimeIds = GetAddTimeIds(modelOptions, schedulerOptions, performGuidance);
+
+                // Loop through the timesteps
+                var step = 0;
+                foreach (var timestep in timesteps)
+                {
+                    step++;
+                    var stepTime = Stopwatch.GetTimestamp();
+                    cancellationToken.ThrowIfCancellationRequested();
+
+                    // Create input tensor.
+                    var inputLatent = performGuidance ? latents.Repeat(2) : latents;
+                    var inputTensor = scheduler.ScaleInput(inputLatent, timestep);
+                    var timestepTensor = CreateTimestepTensor(timestep);
+
+                    var outputChannels = performGuidance ? 2 : 1;
+                    var outputDimension = schedulerOptions.GetScaledDimension(outputChannels);
+                    using (var inferenceParameters = new OnnxInferenceParameters(metadata))
+                    {
+                        inferenceParameters.AddInputTensor(inputTensor);
+                        inferenceParameters.AddInputTensor(timestepTensor);
+                        inferenceParameters.AddInputTensor(promptEmbeddings.PromptEmbeds);
+                        inferenceParameters.AddInputTensor(promptEmbeddings.PooledPromptEmbeds);
+                        inferenceParameters.AddInputTensor(addTimeIds);
+                        inferenceParameters.AddOutputBuffer(outputDimension);
+
+                        var results = await _onnxModelService.RunInferenceAsync(modelOptions, OnnxModelType.Unet, inferenceParameters);
+                        using (var result = results.First())
+                        {
+                            var noisePred = result.ToDenseTensor();
+
+                            // Perform guidance
+                            if (performGuidance)
+                                noisePred = PerformGuidance(noisePred, schedulerOptions.GuidanceScale);
+
+                            // Scheduler Step
+                            var steplatents = scheduler.Step(noisePred, timestep, latents).Result;
+
+                            // Add noise to original latent
+                            var initLatentsProper = scheduler.AddNoise(latentsOriginal, noise, new[] { timestep });
+
+                            // Apply mask and combine
+                            latents = ApplyMaskedLatents(steplatents, initLatentsProper, maskImage);
+                        }
+                    }
+
+                    progressCallback?.Invoke(step, timesteps.Count);
+                    _logger?.LogEnd($"Step {step}/{timesteps.Count}", stepTime);
+                }
+
+                // Decode Latents
+                return await DecodeLatentsAsync(modelOptions, promptOptions, schedulerOptions, latents);
+            }
+        }
+
+
+        ///
+        /// Gets the timesteps.
+        ///
+        /// The prompt.
+        /// The options.
+        /// The scheduler.
+        ///
+        protected override IReadOnlyList<int> GetTimesteps(SchedulerOptions options, IScheduler scheduler)
+        {
+            var inittimestep = Math.Min((int)(options.InferenceSteps * options.Strength), options.InferenceSteps);
+            var start = Math.Max(options.InferenceSteps - inittimestep, 0);
+            return scheduler.Timesteps.Skip(start).ToList();
+        }
+
+
+        ///
+        /// Prepares the latents for inference.
+        ///
+        /// The prompt.
+        /// The options.
+        /// The scheduler.
+        ///
+        protected override async Task<DenseTensor<float>> PrepareLatentsAsync(StableDiffusionModelSet model, PromptOptions prompt, SchedulerOptions options, IScheduler scheduler, IReadOnlyList<int> timesteps)
+        {
+            var imageTensor = prompt.InputImage.ToDenseTensor(new[] { 1, 3, options.Height, options.Width });
+
+            //TODO: Model Config, Channels
+            var outputDimensions = options.GetScaledDimension();
+            var metadata = _onnxModelService.GetModelMetadata(model, OnnxModelType.VaeEncoder);
+            using (var inferenceParameters = new OnnxInferenceParameters(metadata))
+            {
+                inferenceParameters.AddInputTensor(imageTensor);
+                inferenceParameters.AddOutputBuffer(outputDimensions);
+
+                var results = await _onnxModelService.RunInferenceAsync(model, OnnxModelType.VaeEncoder, inferenceParameters);
+                using (var result = results.First())
+                {
+                    var outputResult = result.ToDenseTensor();
+                    var scaledSample = outputResult.MultiplyBy(model.ScaleFactor);
+                    return scaledSample;
+                }
+            }
+        }
+
+
+        ///
+        /// Prepares the mask.
+        ///
+        /// The prompt options.
+        /// The scheduler options.
+        ///
+        private DenseTensor<float> PrepareMask(StableDiffusionModelSet modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions)
+        {
+            using (var mask = promptOptions.InputImageMask.ToImage())
+            {
+                // Prepare the mask
+                int width = schedulerOptions.GetScaledWidth();
+                int height = schedulerOptions.GetScaledHeight();
+                mask.Mutate(x => x.Grayscale());
+                mask.Mutate(x => x.Resize(new Size(width, height), KnownResamplers.NearestNeighbor, true));
+                var maskTensor = new DenseTensor<float>(new[] { 1, 4, width, height });
+                mask.ProcessPixelRows(img =>
+                {
+                    for (int x = 0; x < width; x++)
+                    {
+                        for (int y = 0; y < height; y++)
+                        {
+                            var pixelSpan = img.GetRowSpan(y);
+                            var value = 1f - (pixelSpan[x].A / 255.0f);
+                            maskTensor[0, 0, y, x] = value;
+                            maskTensor[0, 1, y, x] = value; // Needed for shape only
+                            maskTensor[0, 2, y, x] = value; // Needed for shape only
+                            maskTensor[0, 3, y, x] = value; // Needed for shape only
+                        }
+                    }
+                });
+                return maskTensor;
+            }
+        }
+
+
+        ///
+        /// Applies the masked latents.
+        ///
+        /// The latents.
+        /// The initialize latents proper.
+        /// The mask.
+        ///
+        private DenseTensor<float> ApplyMaskedLatents(DenseTensor<float> latents, DenseTensor<float> initLatentsProper, DenseTensor<float> mask)
+        {
+            var result = new DenseTensor<float>(latents.Dimensions);
+            for (int i = 0; i < result.Length; i++)
+            {
+                float maskValue = mask.GetValue(i);
+                result.SetValue(i, initLatentsProper.GetValue(i) * maskValue + latents.GetValue(i) * (1f - maskValue));
+            }
+            return result;
+        }
+    }
+}
diff --git a/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/LatentConsistencyXLDiffuser.cs b/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/LatentConsistencyXLDiffuser.cs
new file mode 100644
index 00000000..8b91e9c6
--- /dev/null
+++ b/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/LatentConsistencyXLDiffuser.cs
@@ -0,0 +1,79 @@
+using Microsoft.Extensions.Logging;
+using Microsoft.ML.OnnxRuntime.Tensors;
+using OnnxStack.Core.Services;
+using OnnxStack.StableDiffusion.Common;
+using OnnxStack.StableDiffusion.Config;
+using OnnxStack.StableDiffusion.Diffusers.StableDiffusionXL;
+using OnnxStack.StableDiffusion.Enums;
+using OnnxStack.StableDiffusion.Models;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using System.Threading;
+using System;
+using OnnxStack.StableDiffusion.Schedulers.LatentConsistency;
+
+namespace OnnxStack.StableDiffusion.Diffusers.LatentConsistencyXL
+{
+    public abstract class LatentConsistencyXLDiffuser : StableDiffusionXLDiffuser
+    {
+        protected LatentConsistencyXLDiffuser(IOnnxModelService onnxModelService, IPromptService promptService, ILogger logger)
+            : base(onnxModelService, promptService, logger) { }
+
+
+        ///
+        /// Gets the type of the pipeline.
+        ///
+        public override DiffuserPipelineType PipelineType => DiffuserPipelineType.LatentConsistencyXL;
+
+
+        ///
+        /// Runs the stable diffusion loop
+        ///
+        ///
+        /// The prompt options.
+        /// The scheduler options.
+        ///
+        /// The cancellation token.
+        ///
+        public override Task<DenseTensor<float>> DiffuseAsync(StableDiffusionModelSet modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, Action progressCallback = null, CancellationToken cancellationToken = default)
+        {
+            // LCM does not support negative prompting
+            promptOptions.NegativePrompt = string.Empty;
+            return base.DiffuseAsync(modelOptions, promptOptions, schedulerOptions, progressCallback, cancellationToken);
+        }
+
+
+        ///
+        /// Runs the stable diffusion batch loop
+        ///
+        /// The model options.
+        /// The prompt options.
+        /// The scheduler options.
+        /// The batch options.
+        /// The progress callback.
+        /// The cancellation token.
+        ///
+        public override IAsyncEnumerable DiffuseBatchAsync(StableDiffusionModelSet modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, BatchOptions batchOptions, Action progressCallback = null, CancellationToken cancellationToken = default)
+        {
+            // LCM does not support negative prompting
+            promptOptions.NegativePrompt = string.Empty;
+            return base.DiffuseBatchAsync(modelOptions, promptOptions, schedulerOptions, batchOptions, progressCallback, cancellationToken);
+        }
+
+
+        ///
+        /// Gets the scheduler.
+        ///
+        ///
+        /// The options.
+        ///
+        protected override IScheduler GetScheduler(SchedulerOptions options)
+        {
+            return options.SchedulerType switch
+            {
+                SchedulerType.LCM => new LCMScheduler(options),
+                _ => default
+            };
+        }
+    }
+}
diff --git a/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/TextDiffuser.cs b/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/TextDiffuser.cs
new file mode 100644
index 00000000..e4642193
--- /dev/null
+++ b/OnnxStack.StableDiffusion/Diffusers/LatentConsistencyXL/TextDiffuser.cs
@@ -0,0 +1,56 @@
+using Microsoft.Extensions.Logging;
+using Microsoft.ML.OnnxRuntime.Tensors;
+using OnnxStack.Core.Services;
+using OnnxStack.StableDiffusion.Common;
+using OnnxStack.StableDiffusion.Config;
+using OnnxStack.StableDiffusion.Enums;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+
+namespace OnnxStack.StableDiffusion.Diffusers.LatentConsistencyXL
+{
+    public sealed class TextDiffuser : LatentConsistencyXLDiffuser
+    {
+        ///
+        /// Initializes a new instance of the class.
+        ///
+        /// The configuration.
+        /// The onnx model service.
+        public TextDiffuser(IOnnxModelService onnxModelService, IPromptService promptService, ILogger logger)
+            : base(onnxModelService, promptService, logger)
+        {
+        }
+
+
+        ///
+        /// Gets the type of the diffuser.
+        ///
+        public override DiffuserType DiffuserType => DiffuserType.TextToImage;
+
+
+        ///
+        /// Gets the timesteps.
+        ///
+        /// The prompt.
+        /// The options.
+        /// The scheduler.
+        ///
+        protected override IReadOnlyList<int> GetTimesteps(SchedulerOptions options, IScheduler scheduler)
+        {
+            return scheduler.Timesteps;
+        }
+
+
+        ///
+        /// Prepares the latents for inference.
+        ///
+        /// The prompt.
+        /// The options.
+        /// The scheduler.
+        ///
+        protected override Task<DenseTensor<float>> PrepareLatentsAsync(StableDiffusionModelSet model, PromptOptions prompt, SchedulerOptions options, IScheduler scheduler, IReadOnlyList<int> timesteps)
+        {
+            return Task.FromResult(scheduler.CreateRandomSample(options.GetScaledDimension(), scheduler.InitNoiseSigma));
+        }
+    }
+}
diff --git a/OnnxStack.StableDiffusion/Enums/DiffuserPipelineType.cs b/OnnxStack.StableDiffusion/Enums/DiffuserPipelineType.cs
index 23cca36e..fb8591d9 100644
--- a/OnnxStack.StableDiffusion/Enums/DiffuserPipelineType.cs
+++ b/OnnxStack.StableDiffusion/Enums/DiffuserPipelineType.cs
@@ -5,6 +5,7 @@ public enum DiffuserPipelineType
         StableDiffusion = 0,
         StableDiffusionXL = 1,
         LatentConsistency = 10,
-        InstaFlow = 11,
+        LatentConsistencyXL = 11,
+        InstaFlow = 30,
     }
 }
diff --git a/OnnxStack.StableDiffusion/Extensions.cs b/OnnxStack.StableDiffusion/Extensions.cs
index d916152f..ce40a870 100644
--- a/OnnxStack.StableDiffusion/Extensions.cs
+++ b/OnnxStack.StableDiffusion/Extensions.cs
@@ -97,26 +97,33 @@ internal static int[] GetScaledDimension(this SchedulerOptions options, int batc
         ///
         public static SchedulerType[] GetSchedulerTypes(this DiffuserPipelineType pipelineType)
         {
-            return pipelineType switch
+            switch (pipelineType)
             {
-                DiffuserPipelineType.InstaFlow => new[]
-                {
-                    SchedulerType.InstaFlow
-                },
-                DiffuserPipelineType.LatentConsistency => new[]
-                {
-                    SchedulerType.LCM
-                },
-                _ => new[]
-                {
-                    SchedulerType.LMS,
-                    SchedulerType.Euler,
-                    SchedulerType.EulerAncestral,
-                    SchedulerType.DDPM,
-                    SchedulerType.DDIM,
-                    SchedulerType.KDPM2
-                }
-            };
+                case DiffuserPipelineType.StableDiffusion:
+                case DiffuserPipelineType.StableDiffusionXL:
+                    return new[]
+                    {
+                        SchedulerType.LMS,
+                        SchedulerType.Euler,
+                        SchedulerType.EulerAncestral,
+                        SchedulerType.DDPM,
+                        SchedulerType.DDIM,
+                        SchedulerType.KDPM2
+                    };
+                case DiffuserPipelineType.LatentConsistency:
+                case DiffuserPipelineType.LatentConsistencyXL:
+                    return new[]
+                    {
+                        SchedulerType.LCM
+                    };
+                case DiffuserPipelineType.InstaFlow:
+                    return new[]
+                    {
+                        SchedulerType.InstaFlow
+                    };
+                default:
+                    return default;
+            }
         }
     }
diff --git a/OnnxStack.StableDiffusion/Pipelines/LatentConsistencyXLPipeline.cs b/OnnxStack.StableDiffusion/Pipelines/LatentConsistencyXLPipeline.cs
new file mode 100644
index 00000000..d2ebbdcd
--- /dev/null
+++ b/OnnxStack.StableDiffusion/Pipelines/LatentConsistencyXLPipeline.cs
@@ -0,0 +1,56 @@
+using Microsoft.Extensions.Logging;
+using OnnxStack.Core;
+using OnnxStack.StableDiffusion.Common;
+using OnnxStack.StableDiffusion.Diffusers;
+using OnnxStack.StableDiffusion.Enums;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace OnnxStack.StableDiffusion.Pipelines
+{
+    public sealed class LatentConsistencyXLPipeline : IPipeline
+    {
+        private readonly DiffuserPipelineType _pipelineType;
+        private readonly ILogger _logger;
+        private readonly ConcurrentDictionary<DiffuserType, IDiffuser> _diffusers;
+
+        ///
+        /// Initializes a new instance of the class.
+        ///
+        /// The onnx model service.
+        /// The prompt service.
+        public LatentConsistencyXLPipeline(IEnumerable<IDiffuser> diffusers, ILogger logger)
+        {
+            _logger = logger;
+            _pipelineType = DiffuserPipelineType.LatentConsistencyXL;
+            _diffusers = diffusers
+                .Where(x => x.PipelineType == _pipelineType)
+                .ToConcurrentDictionary(k => k.DiffuserType, v => v);
+        }
+
+
+        ///
+        /// Gets the type of the pipeline.
+        ///
+        public DiffuserPipelineType PipelineType => _pipelineType;
+
+
+        ///
+        /// Gets the diffusers.
+        ///
+        public ConcurrentDictionary<DiffuserType, IDiffuser> Diffusers => _diffusers;
+
+
+        ///
+        /// Gets the diffuser.
+        ///
+        /// Type of the diffuser.
+        ///
+        public IDiffuser GetDiffuser(DiffuserType diffuserType)
+        {
+            _diffusers.TryGetValue(diffuserType, out var diffuser);
+            return diffuser;
+        }
+    }
+}
diff --git a/OnnxStack.StableDiffusion/Registration.cs b/OnnxStack.StableDiffusion/Registration.cs
index 828668d2..16d0f94e 100644
--- a/OnnxStack.StableDiffusion/Registration.cs
+++ b/OnnxStack.StableDiffusion/Registration.cs
@@ -51,6 +51,7 @@ private static void RegisterServices(this IServiceCollection serviceCollection)
             serviceCollection.AddSingleton();
             serviceCollection.AddSingleton();
             serviceCollection.AddSingleton();
+            serviceCollection.AddSingleton();
             serviceCollection.AddSingleton();
 
             //StableDiffusion
@@ -69,6 +70,11 @@ private static void RegisterServices(this IServiceCollection serviceCollection)
             serviceCollection.AddSingleton();
             serviceCollection.AddSingleton();
 
+            //LatentConsistencyXL
+            serviceCollection.AddSingleton();
+            serviceCollection.AddSingleton();
+            serviceCollection.AddSingleton();
+
             //InstaFlow
             serviceCollection.AddSingleton();
         }
diff --git a/OnnxStack.StableDiffusion/Schedulers/LatentConsistency/LCMScheduler.cs b/OnnxStack.StableDiffusion/Schedulers/LatentConsistency/LCMScheduler.cs
index 619e008d..baf40860 100644
--- a/OnnxStack.StableDiffusion/Schedulers/LatentConsistency/LCMScheduler.cs
+++ b/OnnxStack.StableDiffusion/Schedulers/LatentConsistency/LCMScheduler.cs
@@ -60,6 +60,9 @@ protected override int[] SetTimesteps()
             // Currently, only linear spacing is supported.
             var timeIncrement = Options.TrainTimesteps / Options.OriginalInferenceSteps;
 
+            if (Options.InferenceSteps <= 1)
+                return new[] { Options.TrainTimesteps / 2 - 1 };
+
             //# LCM Training Steps Schedule
             var lcmOriginTimesteps = Enumerable.Range(1, Options.OriginalInferenceSteps)
                 .Select(x => x * timeIncrement - 1)
diff --git a/OnnxStack.UI/UserControls/SchedulerControl.xaml.cs b/OnnxStack.UI/UserControls/SchedulerControl.xaml.cs
index 27fc2e4a..e045991a 100644
--- a/OnnxStack.UI/UserControls/SchedulerControl.xaml.cs
+++ b/OnnxStack.UI/UserControls/SchedulerControl.xaml.cs
@@ -1,8 +1,6 @@
-using LibGit2Sharp;
-using Models;
+using Models;
 using OnnxStack.Core;
 using OnnxStack.StableDiffusion;
-using OnnxStack.StableDiffusion.Config;
 using OnnxStack.StableDiffusion.Enums;
 using OnnxStack.UI.Commands;
 using OnnxStack.UI.Models;
@@ -144,6 +142,7 @@ private void OnModelChanged(ModelOptionsModel model)
             {
                 SchedulerOptions.OriginalInferenceSteps = 100;
                 SchedulerOptions.InferenceSteps = 30;
+                SchedulerOptions.GuidanceScale = 7.5f;
                 SchedulerOptions.SchedulerType = SchedulerType.DDIM;
             }
             else if (model.ModelOptions.PipelineType == DiffuserPipelineType.LatentConsistency)
@@ -153,6 +152,15 @@ private void OnModelChanged(ModelOptionsModel model)
                 SchedulerOptions.GuidanceScale = 1f;
                 SchedulerOptions.SchedulerType = SchedulerType.LCM;
             }
+            else if (model.ModelOptions.PipelineType == DiffuserPipelineType.LatentConsistencyXL)
+            {
+                SchedulerOptions.OriginalInferenceSteps = 50;
+                SchedulerOptions.InferenceSteps = 6;
+                SchedulerOptions.GuidanceScale = 1f;
+                SchedulerOptions.Width = 1024;
+                SchedulerOptions.Height = 1024;
+                SchedulerOptions.SchedulerType = SchedulerType.LCM;
+            }
             else if (model.ModelOptions.PipelineType == DiffuserPipelineType.InstaFlow)
             {
                 SchedulerOptions.InferenceSteps = 1;
@@ -168,7 +176,6 @@ private void OnModelChanged(ModelOptionsModel model)
                 SchedulerOptions.Height = 1024;
                 SchedulerOptions.SchedulerType = SchedulerType.EulerAncestral;
             }
-
         }
 
diff --git a/OnnxStack.UI/Views/ModelView.xaml.cs b/OnnxStack.UI/Views/ModelView.xaml.cs
index b225e4b4..40fabbc8 100644
--- a/OnnxStack.UI/Views/ModelView.xaml.cs
+++ b/OnnxStack.UI/Views/ModelView.xaml.cs
@@ -576,12 +576,12 @@ private Task Add()
                 // TODO: Select pipeline in dialog, then setting any required bits
                 PipelineType = pipeline,
-                ScaleFactor = pipeline == DiffuserPipelineType.StableDiffusionXL ? 0.13025f : 0.18215f,
+                ScaleFactor = IsXLPipeline(pipeline) ? 0.13025f : 0.18215f,
                 TokenizerLimit = 77,
-                PadTokenId = pipeline == DiffuserPipelineType.StableDiffusionXL ? 1 : 49407,
+                PadTokenId = IsXLPipeline(pipeline) ? 1 : 49407,
                 TokenizerLength = 768,
                 Tokenizer2Length = 1280,
-                TokenizerType = pipeline == DiffuserPipelineType.StableDiffusionXL ? TokenizerType.Both : TokenizerType.One,
+                TokenizerType = IsXLPipeline(pipeline) ? TokenizerType.Both : TokenizerType.One,
                 BlankTokenId = 49407,
                 Diffusers = Enum.GetValues().ToList(),
             };
@@ -883,7 +883,7 @@ private bool ValidateModelSet(StableDiffusionModelSet model)
             if (!model.ModelConfigurations.Any())
                 return false;
 
-            var filesToValidate = model.PipelineType == DiffuserPipelineType.StableDiffusionXL
+            var filesToValidate = IsXLPipeline(model.PipelineType)
                 ? model.ModelConfigurations
                 : model.ModelConfigurations.Where(x => x.Type != OnnxModelType.Tokenizer2 && x.Type != OnnxModelType.TextEncoder2);
@@ -916,6 +916,10 @@ private Task SaveConfigurationFile()
             }
         }
 
+        private bool IsXLPipeline(DiffuserPipelineType pipelineType)
+        {
+            return pipelineType == DiffuserPipelineType.StableDiffusionXL || pipelineType == DiffuserPipelineType.LatentConsistencyXL;
+        }
 
         ///
         /// Creates the view model.
         ///
         ///
         private ModelSetViewModel CreateViewModel(ModelConfigTemplate modelTemplate)
         {
-            var modelTypes = modelTemplate.PipelineType == DiffuserPipelineType.StableDiffusionXL
+            var modelTypes = IsXLPipeline(modelTemplate.PipelineType)
                 ? Enum.GetValues()
                 : Enum.GetValues().Where(x => x != OnnxModelType.Tokenizer2 && x != OnnxModelType.TextEncoder2);
             return new ModelSetViewModel
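
Usage note (illustrative, not part of the patch): the sketch below shows one way the new LatentConsistencyXL pipeline could be driven from application code. It assumes the pipelines are resolvable from the DI container as IPipeline implementations (as the Registration.cs block above suggests), that PromptOptions exposes a Prompt property, that IDiffuser exposes DiffuseAsync, and that the using directives, the helper name GenerateAsync, and the provider/modelSet arguments are placeholders to adapt to the real API.

using System;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.ML.OnnxRuntime.Tensors;
using OnnxStack.StableDiffusion.Common;
using OnnxStack.StableDiffusion.Config;
using OnnxStack.StableDiffusion.Enums;

public static class LatentConsistencyXLExample
{
    // provider: an IServiceProvider built with the OnnxStack StableDiffusion registrations (assumed).
    // modelSet: a loaded StableDiffusionModelSet configured for the LatentConsistencyXL pipeline (assumed).
    public static async Task<DenseTensor<float>> GenerateAsync(IServiceProvider provider, StableDiffusionModelSet modelSet)
    {
        // Pick the LatentConsistencyXL pipeline and its text-to-image diffuser.
        var pipeline = provider.GetServices<IPipeline>()
            .First(p => p.PipelineType == DiffuserPipelineType.LatentConsistencyXL);
        var diffuser = pipeline.GetDiffuser(DiffuserType.TextToImage);

        // A negative prompt would be ignored: the LatentConsistencyXL diffusers clear it because LCM has no negative prompting.
        var promptOptions = new PromptOptions { Prompt = "A photo of an astronaut riding a horse" };

        // Defaults mirroring the LatentConsistencyXL branch added to SchedulerControl.OnModelChanged.
        var schedulerOptions = new SchedulerOptions
        {
            SchedulerType = SchedulerType.LCM,
            OriginalInferenceSteps = 50,
            InferenceSteps = 6,
            GuidanceScale = 1f,
            Width = 1024,
            Height = 1024
        };

        // Returns the decoded image as a tensor; converting it to an image file is out of scope here.
        return await diffuser.DiffuseAsync(modelSet, promptOptions, schedulerOptions);
    }
}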