diff --git a/OnnxStack.Console/appsettings.json b/OnnxStack.Console/appsettings.json
index ccf95574..b6365c2f 100644
--- a/OnnxStack.Console/appsettings.json
+++ b/OnnxStack.Console/appsettings.json
@@ -132,6 +132,46 @@
 						"OnnxModelPath": "D:\\Repositories\\photon\\vae_decoder\\model.onnx"
 					}
 				]
+			},
+			{
+				"Name": "InstaFlow",
+				"IsEnabled": true,
+				"PadTokenId": 49407,
+				"BlankTokenId": 49407,
+				"TokenizerLimit": 77,
+				"EmbeddingsLength": 768,
+				"ScaleFactor": 0.18215,
+				"PipelineType": "InstaFlow",
+				"Diffusers": [
+					"TextToImage"
+				],
+				"DeviceId": 0,
+				"InterOpNumThreads": 0,
+				"IntraOpNumThreads": 0,
+				"ExecutionMode": "ORT_SEQUENTIAL",
+				"ExecutionProvider": "DirectML",
+				"ModelConfigurations": [
+					{
+						"Type": "Tokenizer",
+						"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\tokenizer\\model.onnx"
+					},
+					{
+						"Type": "Unet",
+						"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\unet\\model.onnx"
+					},
+					{
+						"Type": "TextEncoder",
+						"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\text_encoder\\model.onnx"
+					},
+					{
+						"Type": "VaeEncoder",
+						"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\vae_encoder\\model.onnx"
+					},
+					{
+						"Type": "VaeDecoder",
+						"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\vae_decoder\\model.onnx"
+					}
+				]
 			}
 		]
 	}
diff --git a/OnnxStack.StableDiffusion/Common/IScheduler.cs b/OnnxStack.StableDiffusion/Common/IScheduler.cs
index f8d1d649..026d2f88 100644
--- a/OnnxStack.StableDiffusion/Common/IScheduler.cs
+++ b/OnnxStack.StableDiffusion/Common/IScheduler.cs
@@ -1,5 +1,4 @@
 using Microsoft.ML.OnnxRuntime.Tensors;
-using OnnxStack.StableDiffusion.Enums;
 using OnnxStack.StableDiffusion.Schedulers;
 using System;
 using System.Collections.Generic;
@@ -8,11 +7,6 @@ namespace OnnxStack.StableDiffusion.Common
 {
     public interface IScheduler : IDisposable
     {
-        /// <summary>
-        /// Gets the compatible pipeline
-        /// </summary>
-        DiffuserPipelineType PipelineType { get; }
-
         /// <summary>
         /// Gets the initial noise sigma.
         /// </summary>
diff --git a/OnnxStack.StableDiffusion/Diffusers/InstaFlow/InstaFlowDiffuser.cs b/OnnxStack.StableDiffusion/Diffusers/InstaFlow/InstaFlowDiffuser.cs
new file mode 100644
index 00000000..a16f44ac
--- /dev/null
+++ b/OnnxStack.StableDiffusion/Diffusers/InstaFlow/InstaFlowDiffuser.cs
@@ -0,0 +1,145 @@
+using Microsoft.Extensions.Logging;
+using Microsoft.ML.OnnxRuntime.Tensors;
+using OnnxStack.Core;
+using OnnxStack.Core.Config;
+using OnnxStack.Core.Model;
+using OnnxStack.Core.Services;
+using OnnxStack.StableDiffusion.Common;
+using OnnxStack.StableDiffusion.Config;
+using OnnxStack.StableDiffusion.Enums;
+using OnnxStack.StableDiffusion.Helpers;
+using OnnxStack.StableDiffusion.Schedulers.InstaFlow;
+using System;
+using System.Diagnostics;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace OnnxStack.StableDiffusion.Diffusers.InstaFlow
+{
+    public abstract class InstaFlowDiffuser : DiffuserBase, IDiffuser
+    {
+        /// <summary>
+        /// Initializes a new instance of the <see cref="InstaFlowDiffuser"/> class.
+        /// </summary>
+        /// <param name="configuration">The configuration.</param>
+        /// <param name="onnxModelService">The onnx model service.</param>
+        public InstaFlowDiffuser(IOnnxModelService onnxModelService, IPromptService promptService, ILogger logger)
+            : base(onnxModelService, promptService, logger) { }
+
+
+        /// <summary>
+        /// Gets the type of the pipeline.
+        /// </summary>
+        public override DiffuserPipelineType PipelineType => DiffuserPipelineType.InstaFlow;
+
+
+        /// <summary>
+        /// Runs the scheduler steps.
+        /// </summary>
+        /// <param name="modelOptions">The model options.</param>
+        /// <param name="promptOptions">The prompt options.</param>
+        /// <param name="schedulerOptions">The scheduler options.</param>
+        /// <param name="promptEmbeddings">The prompt embeddings.</param>
+        /// <param name="performGuidance">if set to <c>true</c> [perform guidance].</param>
+        /// <param name="progressCallback">The progress callback.</param>
+        /// <param name="cancellationToken">The cancellation token.</param>
+        /// <returns></returns>
+        protected override async Task<DenseTensor<float>> SchedulerStepAsync(IModelOptions modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, DenseTensor<float> promptEmbeddings, bool performGuidance, Action<int, int> progressCallback = null, CancellationToken cancellationToken = default)
+        {
+            // Get Scheduler
+            using (var scheduler = GetScheduler(schedulerOptions))
+            {
+                // Get timesteps
+                var timesteps = GetTimesteps(schedulerOptions, scheduler);
+
+                // Create latent sample
+                var latents = await PrepareLatentsAsync(modelOptions, promptOptions, schedulerOptions, scheduler, timesteps);
+
+                // Get Model metadata
+                var metadata = _onnxModelService.GetModelMetadata(modelOptions, OnnxModelType.Unet);
+
+                // Get the distilled Timestep
+                var distilledTimestep = 1.0f / timesteps.Count;
+
+                // Loop through the timesteps
+                var step = 0;
+                foreach (var timestep in timesteps)
+                {
+                    step++;
+                    var stepTime = Stopwatch.GetTimestamp();
+                    cancellationToken.ThrowIfCancellationRequested();
+
+                    // Create input tensor.
+                    var inputLatent = performGuidance ? latents.Repeat(2) : latents;
+                    var inputTensor = scheduler.ScaleInput(inputLatent, timestep);
+                    var timestepTensor = CreateTimestepTensor(inputLatent, timestep);
+
+                    var outputChannels = performGuidance ? 2 : 1;
+                    var outputDimension = schedulerOptions.GetScaledDimension(outputChannels);
+                    using (var inferenceParameters = new OnnxInferenceParameters(metadata))
+                    {
+                        inferenceParameters.AddInputTensor(inputTensor);
+                        inferenceParameters.AddInputTensor(timestepTensor);
+                        inferenceParameters.AddInputTensor(promptEmbeddings);
+                        inferenceParameters.AddOutputBuffer(outputDimension);
+
+                        var results = await _onnxModelService.RunInferenceAsync(modelOptions, OnnxModelType.Unet, inferenceParameters);
+                        using (var result = results.First())
+                        {
+                            var noisePred = result.ToDenseTensor();
+
+                            // Perform guidance
+                            if (performGuidance)
+                                noisePred = PerformGuidance(noisePred, schedulerOptions.GuidanceScale);
+
+                            // Scheduler Step
+                            latents = scheduler.Step(noisePred, timestep, latents).Result;
+
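+                            // InstaFlow update: advance the latents along the predicted velocity by the distilled timestep (an Euler step of size 1/steps)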
+                            latents = noisePred
+                                .MultiplyTensorByFloat(distilledTimestep)
+                                .AddTensors(latents);
+                        }
+                    }
+
+                    progressCallback?.Invoke(step, timesteps.Count);
+                    _logger?.LogEnd($"Step {step}/{timesteps.Count}", stepTime);
+                }
+
+                // Decode Latents
+                return await DecodeLatentsAsync(modelOptions, promptOptions, schedulerOptions, latents);
+            }
+        }
+
+
+        /// <summary>
+        /// Creates the timestep tensor.
+        /// </summary>
+        /// <param name="latents">The latents.</param>
+        /// <param name="timestep">The timestep.</param>
+        /// <returns></returns>
+        private DenseTensor<float> CreateTimestepTensor(DenseTensor<float> latents, int timestep)
+        {
+            var timestepTensor = new DenseTensor<float>(new[] { latents.Dimensions[0] });
+            timestepTensor.Fill(timestep);
+            return timestepTensor;
+        }
+
+
+        /// <summary>
+        /// Gets the scheduler.
+        /// </summary>
+        /// <param name="options">The options.</param>
+        /// <param name="schedulerConfig">The scheduler configuration.</param>
+        /// <returns></returns>
+        protected override IScheduler GetScheduler(SchedulerOptions options)
+        {
+            return options.SchedulerType switch
+            {
+                SchedulerType.InstaFlow => new InstaFlowScheduler(options),
+                _ => default
+            };
+        }
+    }
+}
diff --git a/OnnxStack.StableDiffusion/Diffusers/InstaFlow/TextDiffuser.cs b/OnnxStack.StableDiffusion/Diffusers/InstaFlow/TextDiffuser.cs
new file mode 100644
index 00000000..e4af6e15
--- /dev/null
+++ b/OnnxStack.StableDiffusion/Diffusers/InstaFlow/TextDiffuser.cs
@@ -0,0 +1,56 @@
+using Microsoft.Extensions.Logging;
+using Microsoft.ML.OnnxRuntime.Tensors;
+using OnnxStack.Core.Services;
+using OnnxStack.StableDiffusion.Common;
+using OnnxStack.StableDiffusion.Config;
+using OnnxStack.StableDiffusion.Enums;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+
+namespace OnnxStack.StableDiffusion.Diffusers.InstaFlow
+{
+    public sealed class TextDiffuser : InstaFlowDiffuser
+    {
+        /// <summary>
+        /// Initializes a new instance of the <see cref="TextDiffuser"/> class.
+        /// </summary>
+        /// <param name="configuration">The configuration.</param>
+        /// <param name="onnxModelService">The onnx model service.</param>
+        public TextDiffuser(IOnnxModelService onnxModelService, IPromptService promptService, ILogger logger)
+            : base(onnxModelService, promptService, logger)
+        {
+        }
+
+
+        /// <summary>
+        /// Gets the type of the diffuser.
+        /// </summary>
+        public override DiffuserType DiffuserType => DiffuserType.TextToImage;
+
+
+        /// <summary>
+        /// Gets the timesteps.
+        /// </summary>
+        /// <param name="prompt">The prompt.</param>
+        /// <param name="options">The options.</param>
+        /// <param name="scheduler">The scheduler.</param>
+        /// <returns></returns>
+        protected override IReadOnlyList<int> GetTimesteps(SchedulerOptions options, IScheduler scheduler)
+        {
+            return scheduler.Timesteps;
+        }
+
+
+        /// <summary>
+        /// Prepares the latents for inference.
+        /// </summary>
+        /// <param name="prompt">The prompt.</param>
+        /// <param name="options">The options.</param>
+        /// <param name="scheduler">The scheduler.</param>
+        /// <returns></returns>
+        protected override Task<DenseTensor<float>> PrepareLatentsAsync(IModelOptions model, PromptOptions prompt, SchedulerOptions options, IScheduler scheduler, IReadOnlyList<int> timesteps)
+        {
+            return Task.FromResult(scheduler.CreateRandomSample(options.GetScaledDimension(), scheduler.InitNoiseSigma));
+        }
+    }
+}
diff --git a/OnnxStack.StableDiffusion/Enums/DiffuserPipelineType.cs b/OnnxStack.StableDiffusion/Enums/DiffuserPipelineType.cs
index 4a7f204f..50d4787c 100644
--- a/OnnxStack.StableDiffusion/Enums/DiffuserPipelineType.cs
+++ b/OnnxStack.StableDiffusion/Enums/DiffuserPipelineType.cs
@@ -3,6 +3,7 @@
     public enum DiffuserPipelineType
     {
         StableDiffusion = 0,
-        LatentConsistency = 10
+        LatentConsistency = 10,
+        InstaFlow = 11,
     }
 }
diff --git a/OnnxStack.StableDiffusion/Enums/SchedulerType.cs b/OnnxStack.StableDiffusion/Enums/SchedulerType.cs
index ed9c0f29..2474103c 100644
--- a/OnnxStack.StableDiffusion/Enums/SchedulerType.cs
+++ b/OnnxStack.StableDiffusion/Enums/SchedulerType.cs
@@ -22,7 +22,10 @@ public enum SchedulerType
         [Display(Name = "KDPM2")]
         KDPM2 = 5,
 
-         [Display(Name = "LCM")]
-        LCM = 20
+        [Display(Name = "LCM")]
+        LCM = 20,
+
+        [Display(Name = "InstaFlow")]
+        InstaFlow = 21
     }
 }
diff --git a/OnnxStack.StableDiffusion/Extensions.cs b/OnnxStack.StableDiffusion/Extensions.cs
index 24d97b41..d916152f 100644
--- a/OnnxStack.StableDiffusion/Extensions.cs
+++ b/OnnxStack.StableDiffusion/Extensions.cs
@@ -1,12 +1,9 @@
 using Microsoft.ML.OnnxRuntime;
-using NumSharp;
 using OnnxStack.StableDiffusion.Config;
 using OnnxStack.StableDiffusion.Enums;
 
 using System;
 using System.Linq;
-using System.Numerics;
-using System.Threading.Tasks;
 
 namespace OnnxStack.StableDiffusion
 {
@@ -102,7 +99,15 @@ public static SchedulerType[] GetSchedulerTypes(this DiffuserPipelineType pipeli
         {
             return pipelineType switch
             {
-                DiffuserPipelineType.StableDiffusion => new[]
+                DiffuserPipelineType.InstaFlow => new[]
+                {
+                    SchedulerType.InstaFlow
+                },
+                DiffuserPipelineType.LatentConsistency => new[]
+                {
+                    SchedulerType.LCM
+                },
+                _ => new[]
                 {
                     SchedulerType.LMS,
                     SchedulerType.Euler,
@@ -110,12 +115,7 @@ public static SchedulerType[] GetSchedulerTypes(this DiffuserPipelineType pipeli
                     SchedulerType.DDPM,
                     SchedulerType.DDIM,
                     SchedulerType.KDPM2
-                },
-                DiffuserPipelineType.LatentConsistency => new[]
-                {
-                    SchedulerType.LCM
-                },
-                _ => default
+                }
             };
         }
 
diff --git a/OnnxStack.StableDiffusion/Pipelines/InstaFlowPipeline.cs b/OnnxStack.StableDiffusion/Pipelines/InstaFlowPipeline.cs
new file mode 100644
index 00000000..74ae3fbf
--- /dev/null
+++ b/OnnxStack.StableDiffusion/Pipelines/InstaFlowPipeline.cs
@@ -0,0 +1,56 @@
+using Microsoft.Extensions.Logging;
+using OnnxStack.Core;
+using OnnxStack.StableDiffusion.Common;
+using OnnxStack.StableDiffusion.Diffusers;
+using OnnxStack.StableDiffusion.Enums;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace OnnxStack.StableDiffusion.Pipelines
+{
+    public sealed class InstaFlowPipeline : IPipeline
+    {
+        private readonly DiffuserPipelineType _pipelineType;
+        private readonly ILogger _logger;
+        private readonly ConcurrentDictionary<DiffuserType, IDiffuser> _diffusers;
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="InstaFlowPipeline"/> class.
+        /// </summary>
+        /// <param name="onnxModelService">The onnx model service.</param>
+        /// <param name="promptService">The prompt service.</param>
+        public InstaFlowPipeline(IEnumerable<IDiffuser> diffusers, ILogger logger)
+        {
+            _logger = logger;
+            _pipelineType = DiffuserPipelineType.InstaFlow;
+            _diffusers = diffusers
+                .Where(x => x.PipelineType == _pipelineType)
+                .ToConcurrentDictionary(k => k.DiffuserType, v => v);
+        }
+
+
+        /// <summary>
+        /// Gets the type of the pipeline.
+        /// </summary>
+        public DiffuserPipelineType PipelineType => _pipelineType;
+
+
+        /// <summary>
+        /// Gets the diffusers.
+        /// </summary>
+        public ConcurrentDictionary<DiffuserType, IDiffuser> Diffusers => _diffusers;
+
+
+        /// <summary>
+        /// Gets the diffuser.
+        /// </summary>
+        /// <param name="diffuserType">Type of the diffuser.</param>
+        /// <returns></returns>
+        public IDiffuser GetDiffuser(DiffuserType diffuserType)
+        {
+            _diffusers.TryGetValue(diffuserType, out var diffuser);
+            return diffuser;
+        }
+    }
+}
diff --git a/OnnxStack.StableDiffusion/Registration.cs b/OnnxStack.StableDiffusion/Registration.cs
index 46165289..62fe7d6b 100644
--- a/OnnxStack.StableDiffusion/Registration.cs
+++ b/OnnxStack.StableDiffusion/Registration.cs
@@ -32,6 +32,7 @@ public static void AddOnnxStackStableDiffusion(this IServiceCollection serviceCo
             //Pipelines
             serviceCollection.AddSingleton<IPipeline, StableDiffusionPipeline>();
             serviceCollection.AddSingleton<IPipeline, LatentConsistencyPipeline>();
+            serviceCollection.AddSingleton<IPipeline, InstaFlowPipeline>();
 
             //StableDiffusion
             serviceCollection.AddSingleton();
@@ -43,6 +44,9 @@ public static void AddOnnxStackStableDiffusion(this IServiceCollection serviceCo
             serviceCollection.AddSingleton();
             serviceCollection.AddSingleton();
             serviceCollection.AddSingleton();
+
+            //InstaFlow
+            serviceCollection.AddSingleton<IDiffuser, Diffusers.InstaFlow.TextDiffuser>();
         }
 
 
diff --git a/OnnxStack.StableDiffusion/Schedulers/InstaFlow/InstaFlowScheduler.cs b/OnnxStack.StableDiffusion/Schedulers/InstaFlow/InstaFlowScheduler.cs
new file mode 100644
index 00000000..ecb4e277
--- /dev/null
+++ b/OnnxStack.StableDiffusion/Schedulers/InstaFlow/InstaFlowScheduler.cs
@@ -0,0 +1,69 @@
+using Microsoft.ML.OnnxRuntime.Tensors;
+using OnnxStack.StableDiffusion.Common;
+using OnnxStack.StableDiffusion.Config;
+using OnnxStack.StableDiffusion.Helpers;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace OnnxStack.StableDiffusion.Schedulers.InstaFlow
+{
+    internal class InstaFlowScheduler : SchedulerBase
+    {
+        /// <summary>
+        /// Initializes a new instance of the <see cref="InstaFlowScheduler"/> class.
+        /// </summary>
+        /// <param name="stableDiffusionOptions">The stable diffusion options.</param>
+        public InstaFlowScheduler() : this(new SchedulerOptions()) { }
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="InstaFlowScheduler"/> class.
+        /// </summary>
+        /// <param name="stableDiffusionOptions">The stable diffusion options.</param>
+        /// <param name="options">The scheduler options.</param>
+        public InstaFlowScheduler(SchedulerOptions options) : base(options) { }
+
+        protected override void Initialize()
+        {
+            SetInitNoiseSigma(1f);
+        }
+
+        protected override int[] SetTimesteps()
+        {
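+            // Evenly spaced timesteps from 1000 down to 1000 / InferenceSteps, returned in descending order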
+            var timesteps = new List<double>();
+            for (int i = 0; i < Options.InferenceSteps; i++)
+            {
+                double timestep = (1.0 - (double)i / Options.InferenceSteps) * 1000.0;
+                timesteps.Add(timestep);
+            }
+
+            return timesteps
+                .Select(x => (int)x)
+                .OrderByDescending(x => x)
+                .ToArray();
+        }
+
+
+
+        public override DenseTensor<float> ScaleInput(DenseTensor<float> sample, int timestep)
+        {
+            return sample;
+        }
+
+
+        public override SchedulerStepResult Step(DenseTensor<float> modelOutput, int timestep, DenseTensor<float> sample, int order = 4)
+        {
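+            // The InstaFlow update is applied in the diffuser, so the scheduler step returns the sample unchanged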
+            return new SchedulerStepResult(sample);
+        }
+
+
+        public override DenseTensor<float> AddNoise(DenseTensor<float> originalSamples, DenseTensor<float> noise, IReadOnlyList<int> timesteps)
+        {
+            return originalSamples;
+        }
+    }
+}
diff --git a/OnnxStack.StableDiffusion/Schedulers/LatentConsistency/LCMScheduler.cs b/OnnxStack.StableDiffusion/Schedulers/LatentConsistency/LCMScheduler.cs
index 35963b59..619e008d 100644
--- a/OnnxStack.StableDiffusion/Schedulers/LatentConsistency/LCMScheduler.cs
+++ b/OnnxStack.StableDiffusion/Schedulers/LatentConsistency/LCMScheduler.cs
@@ -1,5 +1,4 @@
 using Microsoft.ML.OnnxRuntime.Tensors;
-using NumSharp;
 using OnnxStack.Core;
 using OnnxStack.StableDiffusion.Config;
 using OnnxStack.StableDiffusion.Enums;
@@ -7,7 +6,6 @@
 using System;
 using System.Collections.Generic;
 using System.Linq;
-using System.Net;
 
 namespace OnnxStack.StableDiffusion.Schedulers.LatentConsistency
 {
@@ -30,12 +28,6 @@ public LCMScheduler() : this(new SchedulerOptions()) { }
         public LCMScheduler(SchedulerOptions options) : base(options) { }
 
 
-        /// <summary>
-        /// Gets the compatible pipeline.
-        /// </summary>
-        public override DiffuserPipelineType PipelineType => DiffuserPipelineType.LatentConsistency;
-
-
         /// <summary>
         /// Initializes this instance.
         /// </summary>
diff --git a/OnnxStack.StableDiffusion/Schedulers/SchedulerBase.cs b/OnnxStack.StableDiffusion/Schedulers/SchedulerBase.cs
index 8a8415b4..ab12ed10 100644
--- a/OnnxStack.StableDiffusion/Schedulers/SchedulerBase.cs
+++ b/OnnxStack.StableDiffusion/Schedulers/SchedulerBase.cs
@@ -50,11 +50,6 @@ public SchedulerBase(SchedulerOptions schedulerOptions)
         /// </summary>
         public IReadOnlyList<int> Timesteps => _timesteps;
 
-        /// <summary>
-        /// Gets the compatible pipeline.
-        /// </summary>
-        public virtual DiffuserPipelineType PipelineType => DiffuserPipelineType.StableDiffusion;
-
         /// <summary>
         /// Scales the input.
         /// </summary>
diff --git a/OnnxStack.UI/Models/SchedulerOptionsModel.cs b/OnnxStack.UI/Models/SchedulerOptionsModel.cs
index 136642f9..857a9ea1 100644
--- a/OnnxStack.UI/Models/SchedulerOptionsModel.cs
+++ b/OnnxStack.UI/Models/SchedulerOptionsModel.cs
@@ -80,7 +80,7 @@ public int Seed
         /// 
         /// The number of steps to run inference for. The more steps the longer it will take to run the inference loop but the image quality should improve.
         /// 
-        [Range(5, 200)]
+        [Range(1, 200)]
         public int InferenceSteps
         {
             get { return _inferenceSteps; }
diff --git a/OnnxStack.UI/UserControls/SchedulerControl.xaml b/OnnxStack.UI/UserControls/SchedulerControl.xaml
index 9dec9524..7abb3ea5 100644
--- a/OnnxStack.UI/UserControls/SchedulerControl.xaml
+++ b/OnnxStack.UI/UserControls/SchedulerControl.xaml
@@ -163,7 +163,7 @@
                                 
                                 
                             
-                            
+                            
                                 
                                     
                                 
diff --git a/OnnxStack.UI/UserControls/SchedulerControl.xaml.cs b/OnnxStack.UI/UserControls/SchedulerControl.xaml.cs
index 38f9d29e..146d933c 100644
--- a/OnnxStack.UI/UserControls/SchedulerControl.xaml.cs
+++ b/OnnxStack.UI/UserControls/SchedulerControl.xaml.cs
@@ -144,21 +144,20 @@ private void OnModelChanged(ModelOptionsModel model)
                 SchedulerOptions.InferenceSteps = 6;
                 SchedulerOptions.GuidanceScale = 1f;
             }
+            else if (model.ModelOptions.PipelineType == DiffuserPipelineType.InstaFlow)
+            {
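+                // InstaFlow generates in a single step and does not use classifier-free guidance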
+                SchedulerOptions.InferenceSteps = 1;
+                SchedulerOptions.GuidanceScale = 0f;
+            }
 
 
             SchedulerTypes.Clear();
             if (model is null)
                 return;
 
-            if (model.ModelOptions.PipelineType == DiffuserPipelineType.StableDiffusion)
-            {
-                foreach (SchedulerType type in Enum.GetValues<SchedulerType>().Where(x => x != SchedulerType.LCM))
-                    SchedulerTypes.Add(type);
-            }
-            else if (model.ModelOptions.PipelineType == DiffuserPipelineType.LatentConsistency)
-            {
-                SchedulerTypes.Add(SchedulerType.LCM);
-            }
+            foreach (SchedulerType type in model.ModelOptions.PipelineType.GetSchedulerTypes())
+                SchedulerTypes.Add(type);
 
             SchedulerOptions.SchedulerType = SchedulerTypes.FirstOrDefault();
         }
diff --git a/OnnxStack.UI/appsettings.json b/OnnxStack.UI/appsettings.json
index 6ce58baa..45a30643 100644
--- a/OnnxStack.UI/appsettings.json
+++ b/OnnxStack.UI/appsettings.json
@@ -441,6 +441,39 @@
           "https://raw.githubusercontent.com/saddam213/OnnxStack/master/Assets/Templates/Deliberate-V3/Preview5.png",
           "https://raw.githubusercontent.com/saddam213/OnnxStack/master/Assets/Templates/Deliberate-V3/Preview6.png"
         ]
+      },
+      {
+        "Name": "InstaFlow 0.9B",
+        "Description": "",
+        "Author": "TheyCallMeHex",
+        "Repository": "https://huggingface.co/TheyCallMeHex/InstaFlow-0.9B-ONNX",
+        "ImageIcon": "",
+        "Status": "Active",
+        "PadTokenId": 49407,
+        "BlankTokenId": 49407,
+        "TokenizerLimit": 77,
+        "EmbeddingsLength": 768,
+        "ScaleFactor": 0.18215,
+        "PipelineType": "InstaFlow",
+        "Diffusers": [
+          "TextToImage"
+        ],
+        "ModelFiles": [
+          "https://huggingface.co/TheyCallMeHex/InstaFlow-0.9B-ONNX/resolve/main/tokenizer/model.onnx",
+          "https://huggingface.co/TheyCallMeHex/InstaFlow-0.9B-ONNX/resolve/main/unet/model.onnx",
+          "https://huggingface.co/TheyCallMeHex/InstaFlow-0.9B-ONNX/resolve/main/unet/model.onnx_data",
+          "https://huggingface.co/TheyCallMeHex/InstaFlow-0.9B-ONNX/resolve/main/text_encoder/model.onnx",
+          "https://huggingface.co/TheyCallMeHex/InstaFlow-0.9B-ONNX/resolve/main/vae_decoder/model.onnx",
+          "https://huggingface.co/TheyCallMeHex/InstaFlow-0.9B-ONNX/resolve/main/vae_encoder/model.onnx"
+        ],
+        "Images": [
+          "",
+          "",
+          "",
+          "",
+          "",
+          ""
+        ]
       }
     ]
   }