
Commit 8087b42

ui: node editor misc 2 (#4306)
## What type of PR is this? (check all applicable)

- [ ] Refactor
- [x] Feature
- [ ] Bug Fix
- [ ] Optimization
- [ ] Documentation Update
- [ ] Community Node Submission

## Description

Next batch of Node Editor changes.
2 parents 572e6b8 + 0c639bd commit 8087b42

File tree

267 files changed: +3645 -2670 lines changed


.gitignore

Lines changed: 0 additions & 37 deletions

@@ -1,23 +1,8 @@
-# ignore default image save location and model symbolic link
 .idea/
-embeddings/
-outputs/
-models/ldm/stable-diffusion-v1/model.ckpt
-**/restoration/codeformer/weights
-
-# ignore user models config
-configs/models.user.yaml
-config/models.user.yml
-invokeai.init
-.version
-.last_model
 
 # ignore the Anaconda/Miniconda installer used while building Docker image
 anaconda.sh
 
-# ignore a directory which serves as a place for initial images
-inputs/
-
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]

@@ -189,39 +174,17 @@ cython_debug/
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
 
-src
 **/__pycache__/
-outputs
 
-# Logs and associated folders
-# created from generated embeddings.
-logs
-testtube
-checkpoints
 # If it's a Mac
 .DS_Store
 
-invokeai/frontend/yarn.lock
-invokeai/frontend/node_modules
-
 # Let the frontend manage its own gitignore
 !invokeai/frontend/web/*
 
 # Scratch folder
 .scratch/
 .vscode/
-gfpgan/
-models/ldm/stable-diffusion-v1/*.sha256
-
-
-# GFPGAN model files
-gfpgan/
-
-# config file (will be created by installer)
-configs/models.yaml
-
-# ignore initfile
-.invokeai
 
 # ignore environment.yml and requirements.txt
 # these are links to the real files in environments-and-requirements

invokeai/app/api_app.py

Lines changed: 4 additions & 3 deletions

@@ -122,6 +122,7 @@ def custom_openapi():
 
     output_schemas = schema(output_types, ref_prefix="#/components/schemas/")
     for schema_key, output_schema in output_schemas["definitions"].items():
+        output_schema["class"] = "output"
         openapi_schema["components"]["schemas"][schema_key] = output_schema
 
     # TODO: note that we assume the schema_key here is the TYPE.__name__
@@ -130,8 +131,8 @@ def custom_openapi():
 
     # Add Node Editor UI helper schemas
     ui_config_schemas = schema([UIConfigBase, _InputField, _OutputField], ref_prefix="#/components/schemas/")
-    for schema_key, output_schema in ui_config_schemas["definitions"].items():
-        openapi_schema["components"]["schemas"][schema_key] = output_schema
+    for schema_key, ui_config_schema in ui_config_schemas["definitions"].items():
+        openapi_schema["components"]["schemas"][schema_key] = ui_config_schema
 
     # Add a reference to the output type to additionalProperties of the invoker schema
     for invoker in all_invocations:
@@ -140,8 +141,8 @@ def custom_openapi():
         output_type_title = output_type_titles[output_type.__name__]
         invoker_schema = openapi_schema["components"]["schemas"][invoker_name]
         outputs_ref = {"$ref": f"#/components/schemas/{output_type_title}"}
-
         invoker_schema["output"] = outputs_ref
+        invoker_schema["class"] = "invocation"
 
     from invokeai.backend.model_management.models import get_model_config_enums
 
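Both api_app.py hunks apply the same idea: after pydantic generates the component schemas, each entry is tagged with a small discriminator ("class": "output" or "class": "invocation", plus an "output" $ref on invocations) so the node editor can tell the schema kinds apart when it parses the OpenAPI document. Below is a minimal sketch of that tagging pattern, assuming pydantic 1.x (whose schema() helper the diff uses); ExampleOutput is a hypothetical stand-in, not an InvokeAI model.

```python
# Sketch only: tag pydantic-generated component schemas with a "class" key,
# mirroring the pattern in custom_openapi() above.
from pydantic import BaseModel
from pydantic.schema import schema  # pydantic 1.x helper


class ExampleOutput(BaseModel):  # hypothetical stand-in for an output model
    value: int


components: dict = {}
output_schemas = schema([ExampleOutput], ref_prefix="#/components/schemas/")
for schema_key, output_schema in output_schemas["definitions"].items():
    output_schema["class"] = "output"  # discriminator a client can read back
    components[schema_key] = output_schema

print(components["ExampleOutput"]["class"])  # -> "output"
```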
invokeai/app/invocations/baseinvocation.py

Lines changed: 11 additions & 0 deletions

@@ -143,6 +143,7 @@ class UIType(str, Enum):
     # region Misc
     FilePath = "FilePath"
     Enum = "enum"
+    Scheduler = "Scheduler"
     # endregion
 
 
@@ -392,6 +393,13 @@ def get_all_subclasses_tuple(cls):
             toprocess.extend(next_subclasses)
         return tuple(subclasses)
 
+    class Config:
+        @staticmethod
+        def schema_extra(schema: dict[str, Any], model_class: Type[BaseModel]) -> None:
+            if "required" not in schema or not isinstance(schema["required"], list):
+                schema["required"] = list()
+            schema["required"].extend(["type"])
+
 
 class RequiredConnectionException(Exception):
     """Raised when an field which requires a connection did not receive a value."""
@@ -452,6 +460,9 @@ def schema_extra(schema: dict[str, Any], model_class: Type[BaseModel]) -> None:
                 schema["title"] = uiconfig.title
             if uiconfig and hasattr(uiconfig, "tags"):
                 schema["tags"] = uiconfig.tags
+            if "required" not in schema or not isinstance(schema["required"], list):
+                schema["required"] = list()
+            schema["required"].extend(["type", "id"])
 
     @abstractmethod
     def invoke(self, context: InvocationContext) -> BaseInvocationOutput:
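The two additions above use pydantic 1.x's Config.schema_extra hook, a callable that post-processes a model's generated JSON schema; here it forces the defaulted type field (and, for invocations, id) into the schema's required list so generated clients treat them as mandatory. A runnable toy sketch of the hook, with a hypothetical ToyInvocation instead of the real base classes:

```python
# Sketch only: pydantic 1.x Config.schema_extra forcing defaulted fields
# into the "required" list, as in the hunks above.
from typing import Any, Type

from pydantic import BaseModel


class ToyInvocation(BaseModel):  # hypothetical stand-in for BaseInvocation
    id: str = "some-id"
    type: str = "toy"

    class Config:
        @staticmethod
        def schema_extra(schema: dict[str, Any], model_class: Type[BaseModel]) -> None:
            # Fields with defaults are normally left out of "required"; add them back.
            if "required" not in schema or not isinstance(schema["required"], list):
                schema["required"] = list()
            schema["required"].extend(["type", "id"])


print(ToyInvocation.schema()["required"])  # -> ['type', 'id']
```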

invokeai/app/invocations/latent.py

Lines changed: 7 additions & 5 deletions

@@ -115,12 +115,14 @@ class DenoiseLatentsInvocation(BaseInvocation):
     noise: Optional[LatentsField] = InputField(description=FieldDescriptions.noise, input=Input.Connection)
     steps: int = InputField(default=10, gt=0, description=FieldDescriptions.steps)
     cfg_scale: Union[float, List[float]] = InputField(
-        default=7.5, ge=1, description=FieldDescriptions.cfg_scale, ui_type=UIType.Float
+        default=7.5, ge=1, description=FieldDescriptions.cfg_scale, ui_type=UIType.Float, title="CFG Scale"
     )
     denoising_start: float = InputField(default=0.0, ge=0, le=1, description=FieldDescriptions.denoising_start)
     denoising_end: float = InputField(default=1.0, ge=0, le=1, description=FieldDescriptions.denoising_end)
-    scheduler: SAMPLER_NAME_VALUES = InputField(default="euler", description=FieldDescriptions.scheduler)
-    unet: UNetField = InputField(description=FieldDescriptions.unet, input=Input.Connection)
+    scheduler: SAMPLER_NAME_VALUES = InputField(
+        default="euler", description=FieldDescriptions.scheduler, ui_type=UIType.Scheduler
+    )
+    unet: UNetField = InputField(description=FieldDescriptions.unet, input=Input.Connection, title="UNet")
     control: Union[ControlField, list[ControlField]] = InputField(
         default=None, description=FieldDescriptions.control, input=Input.Connection
     )
@@ -454,7 +456,7 @@ def _lora_loader():
 
 
 @title("Latents to Image")
-@tags("latents", "image", "vae")
+@tags("latents", "image", "vae", "l2i")
 class LatentsToImageInvocation(BaseInvocation):
     """Generates an image from latents."""
 
@@ -642,7 +644,7 @@ def invoke(self, context: InvocationContext) -> LatentsOutput:
 
 
 @title("Image to Latents")
-@tags("latents", "image", "vae")
+@tags("latents", "image", "vae", "i2l")
 class ImageToLatentsInvocation(BaseInvocation):
     """Encodes an image into latents."""
 
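The field changes here (title="CFG Scale", title="UNet", ui_type=UIType.Scheduler) are UI hints passed through InputField. As a rough illustration only, not InvokeAI's actual InputField, pydantic 1.x copies extra Field() keyword arguments into the field's JSON schema, which is one way such hints can reach a frontend; ToyDenoise and the bare ui_type string below are assumptions made for the sketch.

```python
# Sketch only: extra Field() kwargs show up in the generated field schema.
from pydantic import BaseModel, Field


class ToyDenoise(BaseModel):  # hypothetical stand-in for DenoiseLatentsInvocation
    scheduler: str = Field("euler", description="Scheduler to use", ui_type="Scheduler")
    cfg_scale: float = Field(7.5, ge=1, title="CFG Scale")


props = ToyDenoise.schema()["properties"]
print(props["scheduler"]["ui_type"])  # -> "Scheduler"
print(props["cfg_scale"]["title"])  # -> "CFG Scale"
```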
invokeai/app/invocations/math.py

Lines changed: 5 additions & 5 deletions

@@ -21,7 +21,7 @@ class AddInvocation(BaseInvocation):
     b: int = InputField(default=0, description=FieldDescriptions.num_2)
 
     def invoke(self, context: InvocationContext) -> IntegerOutput:
-        return IntegerOutput(a=self.a + self.b)
+        return IntegerOutput(value=self.a + self.b)
 
 
 @title("Subtract Integers")
@@ -36,7 +36,7 @@ class SubtractInvocation(BaseInvocation):
     b: int = InputField(default=0, description=FieldDescriptions.num_2)
 
     def invoke(self, context: InvocationContext) -> IntegerOutput:
-        return IntegerOutput(a=self.a - self.b)
+        return IntegerOutput(value=self.a - self.b)
 
 
 @title("Multiply Integers")
@@ -51,7 +51,7 @@ class MultiplyInvocation(BaseInvocation):
     b: int = InputField(default=0, description=FieldDescriptions.num_2)
 
     def invoke(self, context: InvocationContext) -> IntegerOutput:
-        return IntegerOutput(a=self.a * self.b)
+        return IntegerOutput(value=self.a * self.b)
 
 
 @title("Divide Integers")
@@ -66,7 +66,7 @@ class DivideInvocation(BaseInvocation):
     b: int = InputField(default=0, description=FieldDescriptions.num_2)
 
     def invoke(self, context: InvocationContext) -> IntegerOutput:
-        return IntegerOutput(a=int(self.a / self.b))
+        return IntegerOutput(value=int(self.a / self.b))
 
 
 @title("Random Integer")
@@ -81,4 +81,4 @@ class RandomIntInvocation(BaseInvocation):
     high: int = InputField(default=np.iinfo(np.int32).max, description="The exclusive high value")
 
     def invoke(self, context: InvocationContext) -> IntegerOutput:
-        return IntegerOutput(a=np.random.randint(self.low, self.high))
+        return IntegerOutput(value=np.random.randint(self.low, self.high))
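These hunks only touch call sites: they assume IntegerOutput (updated elsewhere in this commit) now carries its result in a field named value rather than a. A toy sketch of that shape, with ToyIntegerOutput standing in for the real output model:

```python
# Sketch only: an output model whose result field is named "value".
from pydantic import BaseModel


class ToyIntegerOutput(BaseModel):  # hypothetical stand-in for IntegerOutput
    value: int


def add(a: int, b: int) -> ToyIntegerOutput:
    # Mirrors AddInvocation.invoke: construct the output with value=..., not a=...
    return ToyIntegerOutput(value=a + b)


print(add(2, 3).value)  # -> 5
```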

invokeai/app/invocations/model.py

Lines changed: 4 additions & 4 deletions

@@ -72,7 +72,7 @@ class LoRAModelField(BaseModel):
     base_model: BaseModelType = Field(description="Base model")
 
 
-@title("Main Model Loader")
+@title("Main Model")
 @tags("model")
 class MainModelLoaderInvocation(BaseInvocation):
     """Loads a main model, outputting its submodels."""
@@ -179,7 +179,7 @@ class LoraLoaderOutput(BaseInvocationOutput):
     # fmt: on
 
 
-@title("LoRA Loader")
+@title("LoRA")
 @tags("lora", "model")
 class LoraLoaderInvocation(BaseInvocation):
     """Apply selected lora to unet and text_encoder."""
@@ -257,7 +257,7 @@ class SDXLLoraLoaderOutput(BaseInvocationOutput):
     # fmt: on
 
 
-@title("SDXL LoRA Loader")
+@title("SDXL LoRA")
 @tags("sdxl", "lora", "model")
 class SDXLLoraLoaderInvocation(BaseInvocation):
     """Apply selected lora to unet and text_encoder."""
@@ -356,7 +356,7 @@ class VaeLoaderOutput(BaseInvocationOutput):
     vae: VaeField = OutputField(description=FieldDescriptions.vae, title="VAE")
 
 
-@title("VAE Loader")
+@title("VAE")
 @tags("vae", "model")
 class VaeLoaderInvocation(BaseInvocation):
     """Loads a VAE model, outputting a VaeLoaderOutput"""

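The model.py hunks are display-title renames made through the @title decorator. The sketch below is not InvokeAI's implementation; it shows one plausible way @title and @tags could attach metadata that a schema_extra hook (like the one in baseinvocation.py above, which copies uiconfig.title and uiconfig.tags into the schema) could later pick up. The attribute names _ui_title and _ui_tags are hypothetical.

```python
# Sketch only: class decorators attaching UI metadata for later schema use.
from typing import Callable, Type, TypeVar

T = TypeVar("T")


def title(text: str) -> Callable[[Type[T]], Type[T]]:
    def decorator(cls: Type[T]) -> Type[T]:
        setattr(cls, "_ui_title", text)  # hypothetical attribute name
        return cls

    return decorator


def tags(*names: str) -> Callable[[Type[T]], Type[T]]:
    def decorator(cls: Type[T]) -> Type[T]:
        setattr(cls, "_ui_tags", list(names))  # hypothetical attribute name
        return cls

    return decorator


@title("Main Model")
@tags("model")
class ToyMainModelLoader:  # stand-in for MainModelLoaderInvocation
    pass


print(ToyMainModelLoader._ui_title, ToyMainModelLoader._ui_tags)  # -> Main Model ['model']
```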
invokeai/app/invocations/onnx.py

Lines changed: 2 additions & 2 deletions

@@ -169,7 +169,7 @@ class ONNXTextToLatentsInvocation(BaseInvocation):
         ui_type=UIType.Float,
     )
     scheduler: SAMPLER_NAME_VALUES = InputField(
-        default="euler", description=FieldDescriptions.scheduler, input=Input.Direct
+        default="euler", description=FieldDescriptions.scheduler, input=Input.Direct, ui_type=UIType.Scheduler
     )
     precision: PRECISION_VALUES = InputField(default="tensor(float16)", description=FieldDescriptions.precision)
     unet: UNetField = InputField(
@@ -406,7 +406,7 @@ class OnnxModelField(BaseModel):
     model_type: ModelType = Field(description="Model Type")
 
 
-@title("ONNX Model Loader")
+@title("ONNX Main Model")
 @tags("onnx", "model")
 class OnnxModelLoaderInvocation(BaseInvocation):
     """Loads a main model, outputting its submodels."""
