Merged
28 commits
95883c2
Add Initial (non-working) Seamless Implementation
hipsterusername Aug 27, 2023
3de45af
updates
hipsterusername Aug 27, 2023
c6d0038
Revert old latent changes, update seamless
hipsterusername Aug 27, 2023
0d2e194
Fixed dict error
hipsterusername Aug 27, 2023
ea40a78
add VAE
hipsterusername Aug 27, 2023
19e0f36
Fix vae fields
hipsterusername Aug 27, 2023
5fdd255
updates per stalkers comments
hipsterusername Aug 28, 2023
1f47669
Seamless fixes
hipsterusername Aug 28, 2023
b9731cb
Merge branch 'main' into Seamless
hipsterusername Aug 28, 2023
3ef3670
chore: Black lint
blessedcoolant Aug 28, 2023
421f5b7
Seamless Updates
hipsterusername Aug 28, 2023
1ed0d7b
Merge branch 'main' into Seamless
blessedcoolant Aug 28, 2023
3efb1f6
Merge branch 'Seamless' of https://github.com/invoke-ai/InvokeAI into…
hipsterusername Aug 28, 2023
bb085c5
Move monkeypatch for diffusers/torch bug to hotfixes.py
StAlKeR7779 Aug 28, 2023
594e547
feat: Add Seamless to T2I / I2I / SDXL T2I / I2I + Refiner
blessedcoolant Aug 28, 2023
ef58635
chore: black lint
blessedcoolant Aug 28, 2023
6db19a8
fix: Connection type on Seamless Node VAE Input
blessedcoolant Aug 28, 2023
0ea6705
fix: Seamless not correctly plugged to SDXL Denoise Latents
blessedcoolant Aug 28, 2023
a08d225
fix: Incorrect node ID's for Seamless plugging
blessedcoolant Aug 28, 2023
b5dac99
feat: Add Seamless To Canvas Text To Image / Image To Image + SDXL + …
blessedcoolant Aug 28, 2023
fcb60a7
chore: Update var names that were not updated
blessedcoolant Aug 28, 2023
87bb4d8
fix: Seamless not working with SDXL on Canvas
blessedcoolant Aug 28, 2023
50a266e
feat: Add Seamless to Inpaint & Outpaint
blessedcoolant Aug 28, 2023
99475ab
chore: pyflake lint fixes
blessedcoolant Aug 28, 2023
5133825
fix: Incorrect plug in Dynamic Prompt Graph
blessedcoolant Aug 28, 2023
56ed76f
fix: useMultiSelect file named incorrectly
blessedcoolant Aug 28, 2023
aaae471
fix: SDXL Canvas Inpaint & Outpaint being broken
blessedcoolant Aug 28, 2023
5774640
fix: SDXL LoRA's not working with seamless
blessedcoolant Aug 28, 2023
5 changes: 3 additions & 2 deletions invokeai/app/invocations/latent.py
@@ -34,6 +34,7 @@
from invokeai.backend.model_management.models import ModelType, SilenceWarnings

from ...backend.model_management.lora import ModelPatcher
from ...backend.model_management.seamless import set_seamless
from ...backend.model_management.models import BaseModelType
from ...backend.stable_diffusion import PipelineIntermediateState
from ...backend.stable_diffusion.diffusers_pipeline import (
@@ -456,7 +457,7 @@ def _lora_loader():
)
with ExitStack() as exit_stack, ModelPatcher.apply_lora_unet(
unet_info.context.model, _lora_loader()
), unet_info as unet:
), set_seamless(unet_info.context.model, self.unet.seamless_axes), unet_info as unet:
latents = latents.to(device=unet.device, dtype=unet.dtype)
if noise is not None:
noise = noise.to(device=unet.device, dtype=unet.dtype)
@@ -549,7 +550,7 @@ def invoke(self, context: InvocationContext) -> ImageOutput:
context=context,
)

with vae_info as vae:
with set_seamless(vae_info.context.model, self.vae.seamless_axes), vae_info as vae:
latents = latents.to(vae.device)
if self.fp32:
vae.to(dtype=torch.float32)
51 changes: 50 additions & 1 deletion invokeai/app/invocations/model.py
@@ -8,8 +8,8 @@
BaseInvocation,
BaseInvocationOutput,
FieldDescriptions,
InputField,
Input,
InputField,
InvocationContext,
OutputField,
UIType,
@@ -33,6 +33,7 @@ class UNetField(BaseModel):
unet: ModelInfo = Field(description="Info to load unet submodel")
scheduler: ModelInfo = Field(description="Info to load scheduler submodel")
loras: List[LoraInfo] = Field(description="Loras to apply on model loading")
seamless_axes: List[str] = Field(default_factory=list, description='Axes ("x" and "y") to which to apply seamless tiling')


class ClipField(BaseModel):
@@ -45,6 +46,7 @@ class ClipField(BaseModel):
class VaeField(BaseModel):
# TODO: better naming?
vae: ModelInfo = Field(description="Info to load vae submodel")
seamless_axes: List[str] = Field(default_factory=list, description='Axes ("x" and "y") to which to apply seamless tiling')


class ModelLoaderOutput(BaseInvocationOutput):
@@ -388,3 +390,50 @@ def invoke(self, context: InvocationContext) -> VaeLoaderOutput:
)
)
)


class SeamlessModeOutput(BaseInvocationOutput):
"""Modified Seamless Model output"""

type: Literal["seamless_output"] = "seamless_output"

# Outputs
unet: Optional[UNetField] = OutputField(description=FieldDescriptions.unet, title="UNet")
vae: Optional[VaeField] = OutputField(description=FieldDescriptions.vae, title="VAE")


@title("Seamless")
@tags("seamless", "model")
class SeamlessModeInvocation(BaseInvocation):
"""Applies the seamless transformation to the Model UNet and VAE."""

type: Literal["seamless"] = "seamless"

# Inputs
unet: Optional[UNetField] = InputField(
default=None, description=FieldDescriptions.unet, input=Input.Connection, title="UNet"
)
vae: Optional[VaeField] = InputField(
default=None, description=FieldDescriptions.vae_model, input=Input.Connection, title="VAE"
)
seamless_y: bool = InputField(default=True, input=Input.Any, description="Specify whether Y axis is seamless")
seamless_x: bool = InputField(default=True, input=Input.Any, description="Specify whether X axis is seamless")

def invoke(self, context: InvocationContext) -> SeamlessModeOutput:
# Conditionally append 'x' and 'y' based on seamless_x and seamless_y
unet = copy.deepcopy(self.unet)
vae = copy.deepcopy(self.vae)

seamless_axes_list = []

if self.seamless_x:
seamless_axes_list.append("x")
if self.seamless_y:
seamless_axes_list.append("y")

if unet is not None:
unet.seamless_axes = seamless_axes_list
if vae is not None:
vae.seamless_axes = seamless_axes_list

return SeamlessModeOutput(unet=unet, vae=vae)
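
A minimal sketch of how the new node is meant to be wired (not part of this diff): the "seamless" node sits between a model loader and a denoise node, passing the UNet (and optionally the VAE) through with the chosen axes stamped onto the field. The node IDs below are illustrative placeholders; only the node type and field names come from the invocation above and the graph builders later in this PR.

seamless_node = {
    "id": "seamless",
    "type": "seamless",
    "seamless_x": True,
    "seamless_y": True,
}

edges = [
    # the model loader feeds its UNet into the seamless node...
    {
        "source": {"node_id": "main_model_loader", "field": "unet"},
        "destination": {"node_id": "seamless", "field": "unet"},
    },
    # ...and the seamless node feeds the denoiser a UNetField carrying seamless_axes
    {
        "source": {"node_id": "seamless", "field": "unet"},
        "destination": {"node_id": "denoise_latents", "field": "unet"},
    },
]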
3 changes: 2 additions & 1 deletion invokeai/backend/image_util/seamless.py
@@ -20,7 +20,8 @@ def _conv_forward_asymmetric(self, input, weight, bias):

def configure_model_padding(model, seamless, seamless_axes):
"""
Modifies the 2D convolution layers to use a circular padding mode based on the `seamless` and `seamless_axes` options.
Modifies the 2D convolution layers to use a circular padding mode based on
the `seamless` and `seamless_axes` options.
"""
# TODO: get an explicit interface for this in diffusers: https://github.com/huggingface/diffusers/issues/556
for m in model.modules():
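
A short usage sketch for this older helper (not part of the diff), assuming seamless is a boolean toggle and seamless_axes is an iterable of axis names, as the docstring suggests:

import torch.nn as nn

from invokeai.backend.image_util.seamless import configure_model_padding

# a small stand-in module; in practice this would be the UNet or VAE
model = nn.Sequential(nn.Conv2d(3, 8, kernel_size=3, padding=1))
configure_model_padding(model, seamless=True, seamless_axes=["x", "y"])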
62 changes: 62 additions & 0 deletions invokeai/backend/model_management/seamless.py
@@ -0,0 +1,62 @@
from __future__ import annotations

from contextlib import contextmanager
from typing import List, Union

import torch.nn as nn
from diffusers.models import AutoencoderKL, UNet2DModel


def _conv_forward_asymmetric(self, input, weight, bias):
"""
Patch for Conv2d._conv_forward that supports asymmetric padding
"""
working = nn.functional.pad(input, self.asymmetric_padding["x"], mode=self.asymmetric_padding_mode["x"])
working = nn.functional.pad(working, self.asymmetric_padding["y"], mode=self.asymmetric_padding_mode["y"])
return nn.functional.conv2d(
working,
weight,
bias,
self.stride,
nn.modules.utils._pair(0),
self.dilation,
self.groups,
)


@contextmanager
def set_seamless(model: Union[UNet2DModel, AutoencoderKL], seamless_axes: List[str]):
try:
to_restore = []

for m in model.modules():
if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
m.asymmetric_padding_mode = {}
m.asymmetric_padding = {}
m.asymmetric_padding_mode["x"] = "circular" if ("x" in seamless_axes) else "constant"
m.asymmetric_padding["x"] = (
m._reversed_padding_repeated_twice[0],
m._reversed_padding_repeated_twice[1],
0,
0,
)
m.asymmetric_padding_mode["y"] = "circular" if ("y" in seamless_axes) else "constant"
m.asymmetric_padding["y"] = (
0,
0,
m._reversed_padding_repeated_twice[2],
m._reversed_padding_repeated_twice[3],
)

to_restore.append((m, m._conv_forward))
m._conv_forward = _conv_forward_asymmetric.__get__(m, nn.Conv2d)

yield

finally:
for module, orig_conv_forward in to_restore:
module._conv_forward = orig_conv_forward
if hasattr(module, "asymmetric_padding_mode"):
del module.asymmetric_padding_mode
if hasattr(module, "asymmetric_padding"):
del module.asymmetric_padding
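
A rough usage sketch for the context manager above (not part of the diff). The tiny nn.Sequential stands in for the UNet or VAE that the pipeline actually passes in; set_seamless only requires that the model contain Conv2d or ConvTranspose2d submodules.

import torch
import torch.nn as nn

from invokeai.backend.model_management.seamless import set_seamless

model = nn.Sequential(nn.Conv2d(3, 8, kernel_size=3, padding=1))
x = torch.randn(1, 3, 16, 16)

with set_seamless(model, ["x"]):
    # inside the context the conv pads circularly along x (width) and with zeros
    # along y, so the left and right edges of a decoded tile wrap into each other
    tiled = model(x)

# on exit the original _conv_forward is restored and the temporary
# asymmetric_padding attributes are removed
plain = model(x)
assert tiled.shape == plain.shape == (1, 8, 16, 16)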
15 changes: 15 additions & 0 deletions invokeai/backend/util/hotfixes.py
@@ -761,3 +761,18 @@ def forward(

diffusers.ControlNetModel = ControlNetModel
diffusers.models.controlnet.ControlNetModel = ControlNetModel


# patch LoRACompatibleConv to use the original Conv2d forward function
# this is needed to make the seamless patch work
# NOTE: with this patch, torch.compile crashes on torch 2.0 (already fixed in nightly)
# https://github.com/huggingface/diffusers/pull/4315
# https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/lora.py#L96C18-L96C18
def new_LoRACompatibleConv_forward(self, x):
if self.lora_layer is None:
return super(diffusers.models.lora.LoRACompatibleConv, self).forward(x)
else:
return super(diffusers.models.lora.LoRACompatibleConv, self).forward(x) + self.lora_layer(x)


diffusers.models.lora.LoRACompatibleConv.forward = new_LoRACompatibleConv_forward
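
As a rough illustration (not from the PR), the patch can be exercised directly. This assumes a diffusers release that ships diffusers.models.lora.LoRACompatibleConv with a Conv2d-compatible constructor, and that importing invokeai.backend.util.hotfixes applies the patch at module import time:

import torch
import diffusers

import invokeai.backend.util.hotfixes  # noqa: F401  (module-level code applies the patch)

# with no lora_layer attached, forward now behaves exactly like nn.Conv2d.forward,
# which routes through _conv_forward and therefore picks up the seamless patch
conv = diffusers.models.lora.LoRACompatibleConv(3, 8, kernel_size=3, padding=1)
out = conv(torch.randn(1, 3, 16, 16))
assert out.shape == (1, 8, 16, 16)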
@@ -8,7 +8,7 @@ import {
ImageDraggableData,
TypesafeDraggableData,
} from 'features/dnd/types';
import { useMultiselect } from 'features/gallery/hooks/useMultiselect.ts';
import { useMultiselect } from 'features/gallery/hooks/useMultiselect';
import { MouseEvent, memo, useCallback, useMemo, useState } from 'react';
import { FaTrash } from 'react-icons/fa';
import { MdStar, MdStarBorder } from 'react-icons/md';
@@ -63,7 +63,7 @@ export const addDynamicPromptsToGraph = (
{
source: {
node_id: DYNAMIC_PROMPT,
field: 'prompt_collection',
field: 'collection',
},
destination: {
node_id: ITERATE,
@@ -11,9 +11,11 @@ import {
METADATA_ACCUMULATOR,
NEGATIVE_CONDITIONING,
POSITIVE_CONDITIONING,
REFINER_SEAMLESS,
SDXL_CANVAS_INPAINT_GRAPH,
SDXL_CANVAS_OUTPAINT_GRAPH,
SDXL_MODEL_LOADER,
SEAMLESS,
} from './constants';

export const addSDXLLoRAsToGraph = (
@@ -36,20 +38,25 @@
| MetadataAccumulatorInvocation
| undefined;

// Handle Seamless Plugs
const unetLoaderId = modelLoaderNodeId;
let clipLoaderId = modelLoaderNodeId;
if ([SEAMLESS, REFINER_SEAMLESS].includes(modelLoaderNodeId)) {
clipLoaderId = SDXL_MODEL_LOADER;
}

if (loraCount > 0) {
// Remove modelLoaderNodeId unet/clip/clip2 connections to feed it to LoRAs
graph.edges = graph.edges.filter(
(e) =>
!(
e.source.node_id === modelLoaderNodeId &&
['unet'].includes(e.source.field)
e.source.node_id === unetLoaderId && ['unet'].includes(e.source.field)
) &&
!(
e.source.node_id === modelLoaderNodeId &&
['clip'].includes(e.source.field)
e.source.node_id === clipLoaderId && ['clip'].includes(e.source.field)
) &&
!(
e.source.node_id === modelLoaderNodeId &&
e.source.node_id === clipLoaderId &&
['clip2'].includes(e.source.field)
)
);
@@ -88,7 +95,7 @@
// first lora = start the lora chain, attach directly to model loader
graph.edges.push({
source: {
node_id: modelLoaderNodeId,
node_id: unetLoaderId,
field: 'unet',
},
destination: {
@@ -99,7 +106,7 @@

graph.edges.push({
source: {
node_id: modelLoaderNodeId,
node_id: clipLoaderId,
field: 'clip',
},
destination: {
@@ -110,7 +117,7 @@

graph.edges.push({
source: {
node_id: modelLoaderNodeId,
node_id: clipLoaderId,
field: 'clip2',
},
destination: {
@@ -1,11 +1,15 @@
import { RootState } from 'app/store/store';
import { MetadataAccumulatorInvocation } from 'services/api/types';
import {
MetadataAccumulatorInvocation,
SeamlessModeInvocation,
} from 'services/api/types';
import { NonNullableGraph } from '../../types/types';
import {
CANVAS_OUTPUT,
LATENTS_TO_IMAGE,
MASK_BLUR,
METADATA_ACCUMULATOR,
REFINER_SEAMLESS,
SDXL_CANVAS_IMAGE_TO_IMAGE_GRAPH,
SDXL_CANVAS_INPAINT_GRAPH,
SDXL_CANVAS_OUTPAINT_GRAPH,
@@ -21,7 +25,8 @@ import { craftSDXLStylePrompt } from './helpers/craftSDXLStylePrompt';
export const addSDXLRefinerToGraph = (
state: RootState,
graph: NonNullableGraph,
baseNodeId: string
baseNodeId: string,
modelLoaderNodeId?: string
): void => {
const {
refinerModel,
@@ -33,6 +38,8 @@
refinerStart,
} = state.sdxl;

const { seamlessXAxis, seamlessYAxis } = state.generation;

if (!refinerModel) {
return;
}
@@ -53,6 +60,10 @@
metadataAccumulator.refiner_steps = refinerSteps;
}

const modelLoaderId = modelLoaderNodeId
? modelLoaderNodeId
: SDXL_MODEL_LOADER;

// Construct Style Prompt
const { craftedPositiveStylePrompt, craftedNegativeStylePrompt } =
craftSDXLStylePrompt(state, true);
@@ -65,10 +76,7 @@

graph.edges = graph.edges.filter(
(e) =>
!(
e.source.node_id === SDXL_MODEL_LOADER &&
['vae'].includes(e.source.field)
)
!(e.source.node_id === modelLoaderId && ['vae'].includes(e.source.field))
);

graph.nodes[SDXL_REFINER_MODEL_LOADER] = {
@@ -98,8 +106,39 @@
denoising_end: 1,
};

graph.edges.push(
{
// Add Seamless To Refiner
if (seamlessXAxis || seamlessYAxis) {
graph.nodes[REFINER_SEAMLESS] = {
id: REFINER_SEAMLESS,
type: 'seamless',
seamless_x: seamlessXAxis,
seamless_y: seamlessYAxis,
} as SeamlessModeInvocation;

graph.edges.push(
{
source: {
node_id: SDXL_REFINER_MODEL_LOADER,
field: 'unet',
},
destination: {
node_id: REFINER_SEAMLESS,
field: 'unet',
},
},
{
source: {
node_id: REFINER_SEAMLESS,
field: 'unet',
},
destination: {
node_id: SDXL_REFINER_DENOISE_LATENTS,
field: 'unet',
},
}
);
} else {
graph.edges.push({
source: {
node_id: SDXL_REFINER_MODEL_LOADER,
field: 'unet',
@@ -108,7 +147,10 @@
node_id: SDXL_REFINER_DENOISE_LATENTS,
field: 'unet',
},
},
});
}

graph.edges.push(
{
source: {
node_id: SDXL_REFINER_MODEL_LOADER,