1.0.45 (#313)
* Reworked SAG, removed unnecessary patch
* Reworked anisotropic filters for faster compute.
* Replaced with guided anisotropic filter for less distortion.
lllyasviel authored Sep 2, 2023
1 parent 7538b4d commit 09e0d1c
Showing 6 changed files with 59 additions and 343 deletions.
2 changes: 1 addition & 1 deletion fooocus_version.py
@@ -1 +1 @@
version = '1.0.43'
version = '1.0.45'
15 changes: 15 additions & 0 deletions modules/anisotropic.py
@@ -126,6 +126,21 @@ def bilateral_blur(
    return _bilateral_blur(input, None, kernel_size, sigma_color, sigma_space, border_type, color_distance_type)


def adaptive_anisotropic_filter(x, g=None):
    if g is None:
        g = x
    s, m = torch.std_mean(g, dim=(1, 2, 3), keepdim=True)
    s = s + 1e-5
    guidance = (g - m) / s
    y = _bilateral_blur(x, guidance,
                        kernel_size=(13, 13),
                        sigma_color=3.0,
                        sigma_space=3.0,
                        border_type='reflect',
                        color_distance_type='l1')
    return y


def joint_bilateral_blur(
    input: Tensor,
    guidance: Tensor,
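For context, a minimal usage sketch of the new adaptive_anisotropic_filter (not part of the commit; the BCHW tensor shapes and the standalone import path are assumptions):

import torch
from modules.anisotropic import adaptive_anisotropic_filter

# Hypothetical inputs: a batch to smooth and a guidance signal of the same shape.
x = torch.rand(1, 3, 64, 64)
g = torch.rand(1, 3, 64, 64)

# The guidance is standardized per-sample (zero mean, unit std), then used as the
# joint signal for a 13x13 L1 bilateral blur, so edges present in g are preserved.
y = adaptive_anisotropic_filter(x, g)    # same shape as x
y_self = adaptive_anisotropic_filter(x)  # g defaults to x itself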
10 changes: 10 additions & 0 deletions modules/default_pipeline.py
@@ -4,6 +4,7 @@
import modules.path

from comfy.model_base import SDXL, SDXLRefiner
from modules.patch import cfg_patched


xl_base: core.StableDiffusionModel = None
@@ -123,6 +124,15 @@ def process(positive_prompt, negative_prompt, steps, switch, width, height, imag
    global positive_conditions_cache, negative_conditions_cache, \
        positive_conditions_refiner_cache, negative_conditions_refiner_cache

    if xl_base is not None:
        xl_base.unet.model_options['sampler_cfg_function'] = cfg_patched

    if xl_base_patched is not None:
        xl_base_patched.unet.model_options['sampler_cfg_function'] = cfg_patched

    if xl_refiner is not None:
        xl_refiner.unet.model_options['sampler_cfg_function'] = cfg_patched

    positive_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=positive_prompt) if positive_conditions_cache is None else positive_conditions_cache
    negative_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=negative_prompt) if negative_conditions_cache is None else negative_conditions_cache

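cfg_patched is defined in modules/patch.py, whose diff is not included above, so only its role is visible here: it is installed as the sampler_cfg_function hook that ComfyUI calls when combining the conditional and unconditional predictions. Below is a hedged sketch of that hook's general shape, reduced to plain classifier-free guidance; the argument keys follow ComfyUI's convention, and the commit's actual SAG / guided-filter logic is not reproduced.

def cfg_patched(args):
    # ComfyUI passes a dict; 'cond', 'uncond' and 'cond_scale' are the
    # conventional keys (assumed here, since the real patch is not shown).
    cond = args['cond']
    uncond = args['uncond']
    cond_scale = args['cond_scale']
    # Plain classifier-free guidance; Fooocus's real cfg_patched presumably
    # inserts the reworked SAG / guided anisotropic filtering at this point.
    return uncond + (cond - uncond) * cond_scale

# Installed exactly as shown in the diff above:
# model.unet.model_options['sampler_cfg_function'] = cfg_patched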
