Skip to content

Commit d7969cb

Browse files
authored
Replace print with logging (comfyanonymous#6138)
* Replace print with logging * nit * nit * nit * nit * nit * nit
1 parent bddb026 commit d7969cb

File tree

22 files changed

+49
-45
lines changed

22 files changed

+49
-45
lines changed

Diff for: .ci/update_windows/update.py

+7-7
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ def pull(repo, remote_name='origin', branch='master'):
2828

2929
if repo.index.conflicts is not None:
3030
for conflict in repo.index.conflicts:
31-
print('Conflicts found in:', conflict[0].path)
31+
print('Conflicts found in:', conflict[0].path) # noqa: T201
3232
raise AssertionError('Conflicts, ahhhhh!!')
3333

3434
user = repo.default_signature
@@ -49,18 +49,18 @@ def pull(repo, remote_name='origin', branch='master'):
4949
repo = pygit2.Repository(repo_path)
5050
ident = pygit2.Signature('comfyui', 'comfy@ui')
5151
try:
52-
print("stashing current changes")
52+
print("stashing current changes") # noqa: T201
5353
repo.stash(ident)
5454
except KeyError:
55-
print("nothing to stash")
55+
print("nothing to stash") # noqa: T201
5656
backup_branch_name = 'backup_branch_{}'.format(datetime.today().strftime('%Y-%m-%d_%H_%M_%S'))
57-
print("creating backup branch: {}".format(backup_branch_name))
57+
print("creating backup branch: {}".format(backup_branch_name)) # noqa: T201
5858
try:
5959
repo.branches.local.create(backup_branch_name, repo.head.peel())
6060
except:
6161
pass
6262

63-
print("checking out master branch")
63+
print("checking out master branch") # noqa: T201
6464
branch = repo.lookup_branch('master')
6565
if branch is None:
6666
ref = repo.lookup_reference('refs/remotes/origin/master')
@@ -72,7 +72,7 @@ def pull(repo, remote_name='origin', branch='master'):
7272
ref = repo.lookup_reference(branch.name)
7373
repo.checkout(ref)
7474

75-
print("pulling latest changes")
75+
print("pulling latest changes") # noqa: T201
7676
pull(repo)
7777

7878
if "--stable" in sys.argv:
@@ -94,7 +94,7 @@ def latest_tag(repo):
9494
if latest_tag is not None:
9595
repo.checkout(latest_tag)
9696

97-
print("Done!")
97+
print("Done!") # noqa: T201
9898

9999
self_update = True
100100
if len(sys.argv) > 2:

Diff for: app/user_manager.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -38,8 +38,8 @@ def __init__(self):
3838
if not os.path.exists(user_directory):
3939
os.makedirs(user_directory, exist_ok=True)
4040
if not args.multi_user:
41-
print("****** User settings have been changed to be stored on the server instead of browser storage. ******")
42-
print("****** For multi-user setups add the --multi-user CLI argument to enable multiple user profiles. ******")
41+
logging.warning("****** User settings have been changed to be stored on the server instead of browser storage. ******")
42+
logging.warning("****** For multi-user setups add the --multi-user CLI argument to enable multiple user profiles. ******")
4343

4444
if args.multi_user:
4545
if os.path.isfile(self.get_users_file()):

Diff for: comfy/cldm/cldm.py

-1
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,6 @@ def __init__(
160160
if isinstance(self.num_classes, int):
161161
self.label_emb = nn.Embedding(num_classes, time_embed_dim)
162162
elif self.num_classes == "continuous":
163-
print("setting up linear c_adm embedding layer")
164163
self.label_emb = nn.Linear(1, time_embed_dim)
165164
elif self.num_classes == "sequential":
166165
assert adm_in_channels is not None

Diff for: comfy/extra_samplers/uni_pc.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
import torch
44
import math
5+
import logging
56

67
from tqdm.auto import trange
78

@@ -474,7 +475,7 @@ def multistep_uni_pc_update(self, x, model_prev_list, t_prev_list, t, order, **k
474475
return self.multistep_uni_pc_vary_update(x, model_prev_list, t_prev_list, t, order, **kwargs)
475476

476477
def multistep_uni_pc_vary_update(self, x, model_prev_list, t_prev_list, t, order, use_corrector=True):
477-
print(f'using unified predictor-corrector with order {order} (solver type: vary coeff)')
478+
logging.info(f'using unified predictor-corrector with order {order} (solver type: vary coeff)')
478479
ns = self.noise_schedule
479480
assert order <= len(model_prev_list)
480481

@@ -518,7 +519,6 @@ def multistep_uni_pc_vary_update(self, x, model_prev_list, t_prev_list, t, order
518519
A_p = C_inv_p
519520

520521
if use_corrector:
521-
print('using corrector')
522522
C_inv = torch.linalg.inv(C)
523523
A_c = C_inv
524524

Diff for: comfy/hooks.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
import torch
66
import numpy as np
77
import itertools
8+
import logging
89

910
if TYPE_CHECKING:
1011
from comfy.model_patcher import ModelPatcher, PatcherInjection
@@ -575,7 +576,7 @@ def load_hook_lora_for_models(model: 'ModelPatcher', clip: 'CLIP', lora: dict[st
575576
k1 = set(k1)
576577
for x in loaded:
577578
if (x not in k) and (x not in k1):
578-
print(f"NOT LOADED {x}")
579+
logging.warning(f"NOT LOADED {x}")
579580
return (new_modelpatcher, new_clip, hook_group)
580581

581582
def _combine_hooks_from_values(c_dict: dict[str, HookGroup], values: dict[str, HookGroup], cache: dict[tuple[HookGroup, HookGroup], HookGroup]):

Diff for: comfy/ldm/aura/mmdit.py

-1
Original file line numberDiff line numberDiff line change
@@ -381,7 +381,6 @@ def extend_pe(self, init_dim=(16, 16), target_dim=(64, 64)):
381381
pe_new = pe_as_2d.squeeze(0).permute(1, 2, 0).flatten(0, 1)
382382
self.positional_encoding.data = pe_new.unsqueeze(0).contiguous()
383383
self.h_max, self.w_max = target_dim
384-
print("PE extended to", target_dim)
385384

386385
def pe_selection_index_based_on_dim(self, h, w):
387386
h_p, w_p = h // self.patch_size, w // self.patch_size

Diff for: comfy/ldm/modules/diffusionmodules/util.py

+4-3
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99

1010

1111
import math
12+
import logging
1213
import torch
1314
import torch.nn as nn
1415
import numpy as np
@@ -130,7 +131,7 @@ def make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timestep
130131
# add one to get the final alpha values right (the ones from first scale to data during sampling)
131132
steps_out = ddim_timesteps + 1
132133
if verbose:
133-
print(f'Selected timesteps for ddim sampler: {steps_out}')
134+
logging.info(f'Selected timesteps for ddim sampler: {steps_out}')
134135
return steps_out
135136

136137

@@ -142,8 +143,8 @@ def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True):
142143
# according the the formula provided in https://arxiv.org/abs/2010.02502
143144
sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev))
144145
if verbose:
145-
print(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}')
146-
print(f'For the chosen value of eta, which is {eta}, '
146+
logging.info(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}')
147+
logging.info(f'For the chosen value of eta, which is {eta}, '
147148
f'this results in the following sigma_t schedule for ddim sampler {sigmas}')
148149
return sigmas, alphas, alphas_prev
149150

Diff for: comfy/ldm/util.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import importlib
2+
import logging
23

34
import torch
45
from torch import optim
@@ -23,7 +24,7 @@ def log_txt_as_img(wh, xc, size=10):
2324
try:
2425
draw.text((0, 0), lines, fill="black", font=font)
2526
except UnicodeEncodeError:
26-
print("Cant encode string for logging. Skipping.")
27+
logging.warning("Cant encode string for logging. Skipping.")
2728

2829
txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0
2930
txts.append(txt)
@@ -65,7 +66,7 @@ def mean_flat(tensor):
6566
def count_params(model, verbose=False):
6667
total_params = sum(p.numel() for p in model.parameters())
6768
if verbose:
68-
print(f"{model.__class__.__name__} has {total_params*1.e-6:.2f} M params.")
69+
logging.info(f"{model.__class__.__name__} has {total_params*1.e-6:.2f} M params.")
6970
return total_params
7071

7172

Diff for: comfy/model_base.py

-1
Original file line numberDiff line numberDiff line change
@@ -770,7 +770,6 @@ def concat_cond(self, **kwargs):
770770
mask = torch.ones_like(noise)[:, :1]
771771

772772
mask = torch.mean(mask, dim=1, keepdim=True)
773-
print(mask.shape)
774773
mask = utils.common_upscale(mask.to(device), noise.shape[-1] * 8, noise.shape[-2] * 8, "bilinear", "center")
775774
mask = mask.view(mask.shape[0], mask.shape[2] // 8, 8, mask.shape[3] // 8, 8).permute(0, 2, 4, 1, 3).reshape(mask.shape[0], -1, mask.shape[2] // 8, mask.shape[3] // 8)
776775
mask = utils.resize_to_batch_size(mask, noise.shape[0])

Diff for: comfy/model_management.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1084,7 +1084,7 @@ def unload_all_models():
10841084

10851085

10861086
def resolve_lowvram_weight(weight, model, key): #TODO: remove
1087-
print("WARNING: The comfy.model_management.resolve_lowvram_weight function will be removed soon, please stop using it.")
1087+
logging.warning("The comfy.model_management.resolve_lowvram_weight function will be removed soon, please stop using it.")
10881088
return weight
10891089

10901090
#TODO: might be cleaner to put this somewhere else

Diff for: comfy/model_patcher.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -773,7 +773,7 @@ def current_loaded_device(self):
773773
return self.model.device
774774

775775
def calculate_weight(self, patches, weight, key, intermediate_dtype=torch.float32):
776-
print("WARNING the ModelPatcher.calculate_weight function is deprecated, please use: comfy.lora.calculate_weight instead")
776+
logging.warning("The ModelPatcher.calculate_weight function is deprecated, please use: comfy.lora.calculate_weight instead")
777777
return comfy.lora.calculate_weight(patches, weight, key, intermediate_dtype=intermediate_dtype)
778778

779779
def cleanup(self):
@@ -1029,7 +1029,7 @@ def patch_hooks(self, hooks: comfy.hooks.HookGroup):
10291029
if cached_weights is not None:
10301030
for key in cached_weights:
10311031
if key not in model_sd_keys:
1032-
print(f"WARNING cached hook could not patch. key does not exist in model: {key}")
1032+
logging.warning(f"Cached hook could not patch. Key does not exist in model: {key}")
10331033
continue
10341034
self.patch_cached_hook_weights(cached_weights=cached_weights, key=key, memory_counter=memory_counter)
10351035
else:
@@ -1039,7 +1039,7 @@ def patch_hooks(self, hooks: comfy.hooks.HookGroup):
10391039
original_weights = self.get_key_patches()
10401040
for key in relevant_patches:
10411041
if key not in model_sd_keys:
1042-
print(f"WARNING cached hook would not patch. key does not exist in model: {key}")
1042+
logging.warning(f"Cached hook would not patch. Key does not exist in model: {key}")
10431043
continue
10441044
self.patch_hook_weight_to_device(hooks=hooks, combined_patches=relevant_patches, key=key, original_weights=original_weights,
10451045
memory_counter=memory_counter)

Diff for: comfy/sd.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -940,11 +940,11 @@ def load_diffusion_model(unet_path, model_options={}):
940940
return model
941941

942942
def load_unet(unet_path, dtype=None):
943-
print("WARNING: the load_unet function has been deprecated and will be removed please switch to: load_diffusion_model")
943+
logging.warning("The load_unet function has been deprecated and will be removed please switch to: load_diffusion_model")
944944
return load_diffusion_model(unet_path, model_options={"dtype": dtype})
945945

946946
def load_unet_state_dict(sd, dtype=None):
947-
print("WARNING: the load_unet_state_dict function has been deprecated and will be removed please switch to: load_diffusion_model_state_dict")
947+
logging.warning("The load_unet_state_dict function has been deprecated and will be removed please switch to: load_diffusion_model_state_dict")
948948
return load_diffusion_model_state_dict(sd, model_options={"dtype": dtype})
949949

950950
def save_checkpoint(output_path, model, clip=None, vae=None, clip_vision=None, metadata=None, extra_keys={}):

Diff for: comfy/sd1_clip.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -41,8 +41,7 @@ def encode_token_weights(self, token_weight_pairs):
4141
to_encode.append(self.gen_empty_tokens(self.special_tokens, max_token_len))
4242
else:
4343
to_encode.append(gen_empty_tokens(self.special_tokens, max_token_len))
44-
print(to_encode)
45-
44+
4645
o = self.encode(to_encode)
4746
out, pooled = o[:2]
4847

Diff for: comfy_extras/chainner_models/model_loading.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1+
import logging
12
from spandrel import ModelLoader
23

34
def load_state_dict(state_dict):
4-
print("WARNING: comfy_extras.chainner_models is deprecated and has been replaced by the spandrel library.")
5+
logging.warning("comfy_extras.chainner_models is deprecated and has been replaced by the spandrel library.")
56
return ModelLoader().load_from_state_dict(state_dict).eval()

Diff for: comfy_extras/nodes_hooks.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from __future__ import annotations
22
from typing import TYPE_CHECKING, Union
3+
import logging
34
import torch
45
from collections.abc import Iterable
56

@@ -539,7 +540,7 @@ def create_hook_keyframes(self, strength_start: float, strength_end: float, inte
539540
is_first = False
540541
prev_hook_kf.add(comfy.hooks.HookKeyframe(strength=strength, start_percent=percent, guarantee_steps=guarantee_steps))
541542
if print_keyframes:
542-
print(f"Hook Keyframe - start_percent:{percent} = {strength}")
543+
logging.info(f"Hook Keyframe - start_percent:{percent} = {strength}")
543544
return (prev_hook_kf,)
544545

545546
class CreateHookKeyframesFromFloats:
@@ -588,7 +589,7 @@ def create_hook_keyframes(self, floats_strength: Union[float, list[float]],
588589
is_first = False
589590
prev_hook_kf.add(comfy.hooks.HookKeyframe(strength=strength, start_percent=percent, guarantee_steps=guarantee_steps))
590591
if print_keyframes:
591-
print(f"Hook Keyframe - start_percent:{percent} = {strength}")
592+
logging.info(f"Hook Keyframe - start_percent:{percent} = {strength}")
592593
return (prev_hook_kf,)
593594
#------------------------------------------
594595
###########################################

Diff for: main.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ def execute_script(script_path):
6363
spec.loader.exec_module(module)
6464
return True
6565
except Exception as e:
66-
print(f"Failed to execute startup-script: {script_path} / {e}")
66+
logging.error(f"Failed to execute startup-script: {script_path} / {e}")
6767
return False
6868

6969
if args.disable_all_custom_nodes:
@@ -85,14 +85,14 @@ def execute_script(script_path):
8585
success = execute_script(script_path)
8686
node_prestartup_times.append((time.perf_counter() - time_before, module_path, success))
8787
if len(node_prestartup_times) > 0:
88-
print("\nPrestartup times for custom nodes:")
88+
logging.info("\nPrestartup times for custom nodes:")
8989
for n in sorted(node_prestartup_times):
9090
if n[2]:
9191
import_message = ""
9292
else:
9393
import_message = " (PRESTARTUP FAILED)"
94-
print("{:6.1f} seconds{}:".format(n[0], import_message), n[1])
95-
print()
94+
logging.info("{:6.1f} seconds{}: {}".format(n[0], import_message, n[1]))
95+
logging.info("")
9696

9797
apply_custom_paths()
9898
execute_prestartup_script()

Diff for: new_updater.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -32,4 +32,4 @@ def update_windows_updater():
3232
except:
3333
pass
3434
shutil.copy(bat_path, dest_bat_path)
35-
print("Updated the windows standalone package updater.")
35+
print("Updated the windows standalone package updater.") # noqa: T201

Diff for: ruff.toml

+4-1
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,10 @@ lint.ignore = ["ALL"]
44
# Enable specific rules
55
lint.select = [
66
"S307", # suspicious-eval-usage
7+
"T201", # print-usage
78
# The "F" series in Ruff stands for "Pyflakes" rules, which catch various Python syntax errors and undefined names.
89
# See all rules here: https://docs.astral.sh/ruff/rules/#pyflakes-f
910
"F",
10-
]
11+
]
12+
13+
exclude = ["*.ipynb"]

Diff for: tests-unit/server/routes/internal_routes_test.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -89,9 +89,9 @@ async def test_routes_added_to_app(aiohttp_client_factory, internal_routes):
8989
client = await aiohttp_client_factory()
9090
try:
9191
resp = await client.get('/files')
92-
print(f"Response received: status {resp.status}")
92+
print(f"Response received: status {resp.status}") # noqa: T201
9393
except Exception as e:
94-
print(f"Exception occurred during GET request: {e}")
94+
print(f"Exception occurred during GET request: {e}") # noqa: T201
9595
raise
9696

9797
assert resp.status != 404, "Route /files does not exist"

Diff for: tests/conftest.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ def pytest_collection_modifyitems(items):
2828
last_items = []
2929
for test_name in LAST_TESTS:
3030
for item in items.copy():
31-
print(item.module.__name__, item)
31+
print(item.module.__name__, item) # noqa: T201
3232
if item.module.__name__ == test_name:
3333
last_items.append(item)
3434
items.remove(item)

Diff for: tests/inference/test_execution.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,7 @@ def _server(self, args_pytest, request):
134134
use_lru, lru_size = request.param
135135
if use_lru:
136136
pargs += ['--cache-lru', str(lru_size)]
137-
print("Running server with args:", pargs)
137+
print("Running server with args:", pargs) # noqa: T201
138138
p = subprocess.Popen(pargs)
139139
yield
140140
p.kill()
@@ -150,8 +150,8 @@ def start_client(self, listen:str, port:int):
150150
try:
151151
comfy_client.connect(listen=listen, port=port)
152152
except ConnectionRefusedError as e:
153-
print(e)
154-
print(f"({i+1}/{n_tries}) Retrying...")
153+
print(e) # noqa: T201
154+
print(f"({i+1}/{n_tries}) Retrying...") # noqa: T201
155155
else:
156156
break
157157
return comfy_client

Diff for: tests/inference/test_inference.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -171,8 +171,8 @@ def start_client(self, listen:str, port:int):
171171
try:
172172
comfy_client.connect(listen=listen, port=port)
173173
except ConnectionRefusedError as e:
174-
print(e)
175-
print(f"({i+1}/{n_tries}) Retrying...")
174+
print(e) # noqa: T201
175+
print(f"({i+1}/{n_tries}) Retrying...") # noqa: T201
176176
else:
177177
break
178178
return comfy_client

0 commit comments

Comments (0)