diff --git a/scripts/relauncher.py b/scripts/relauncher.py
index 7179d7f41..457d539c3 100644
--- a/scripts/relauncher.py
+++ b/scripts/relauncher.py
@@ -19,6 +19,8 @@
 # Creates a public xxxxx.gradio.app share link to allow others to use your interface (requires properly forwarded ports to work correctly)
 share = False
 
+# Generate tiling images
+tiling = False
 # Enter other `--arguments` you wish to use - Must be entered as a `--argument ` syntax
 additional_arguments = ""
 
@@ -37,6 +39,8 @@
     common_arguments += "--optimized-turbo "
 if optimized == True:
     common_arguments += "--optimized "
+if tiling == True:
+    common_arguments += "--tiling "
 if share == True:
     common_arguments += "--share "
 
diff --git a/scripts/webui.py b/scripts/webui.py
index d50bda391..dd40a7429 100644
--- a/scripts/webui.py
+++ b/scripts/webui.py
@@ -44,6 +44,7 @@
 parser.add_argument("--skip-save", action='store_true', help="do not save indiviual samples. For speed measurements.", default=False)
 parser.add_argument('--no-job-manager', action='store_true', help="Don't use the experimental job manager on top of gradio", default=False)
 parser.add_argument("--max-jobs", type=int, help="Maximum number of concurrent 'generate' commands", default=1)
+parser.add_argument("--tiling", action='store_true', help="Generate tiling images", default=False)
 opt = parser.parse_args()
 
 #Should not be needed anymore
@@ -86,6 +87,18 @@
 from ldm.models.diffusion.plms import PLMSSampler
 from ldm.util import instantiate_from_config
 
+# add global options to models
+def patch_conv(**patch):
+    cls = torch.nn.Conv2d
+    init = cls.__init__
+    def __init__(self, *args, **kwargs):
+        return init(self, *args, **kwargs, **patch)
+    cls.__init__ = __init__
+
+if opt.tiling:
+    patch_conv(padding_mode='circular')
+    print("patched for tiling")
+
 try:
     # this silences the annoying "Some weights of the model checkpoint were not used when initializing..." message at start.
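
For reviewers, a standalone sketch (not part of the patch) of what the `patch_conv` monkey-patch above does: it wraps `torch.nn.Conv2d.__init__` so every convolution constructed afterwards is built with `padding_mode='circular'`, i.e. padding wraps around the image borders instead of zero-padding, which is what makes the generated images tile seamlessly. The toy input and variable names below are illustrative only.

```python
import torch

# Same technique as the patch: wrap Conv2d.__init__ so that every Conv2d
# constructed after this call also receives the extra keyword arguments.
def patch_conv(**patch):
    cls = torch.nn.Conv2d
    init = cls.__init__
    def __init__(self, *args, **kwargs):
        return init(self, *args, **kwargs, **patch)
    cls.__init__ = __init__

patch_conv(padding_mode='circular')

# The padding mode is applied even though the caller never asked for it.
conv = torch.nn.Conv2d(1, 1, kernel_size=3, padding=1, bias=False)
print(conv.padding_mode)  # -> 'circular'

# With circular padding the left/right and top/bottom edges wrap around,
# so features stay continuous across the tiling seam.
x = torch.arange(16.0).reshape(1, 1, 4, 4)
print(conv(x).shape)  # -> torch.Size([1, 1, 4, 4])
```

Because the patch changes the class globally, it has to run before the model is instantiated, which is presumably why the diff places it right after the `ldm` imports and ahead of checkpoint loading. Note also that any caller that later passes `padding_mode=` explicitly would clash with the injected keyword and raise a `TypeError`.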