Commit 508c488

tidy(mm): consistent class names
1 parent 32a9ad1 commit 508c488

File tree: 14 files changed, +137 -139 lines
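
In the hunks shown below, the rename is uniform across invokeai.backend.model_manager.config: ModelConfigBase becomes Config_Base, CheckpointConfigBase becomes Checkpoint_Config_Base, and MainConfigBase becomes Main_Config_Base. For code outside this repository that still imports the old names, a minimal compatibility sketch could look like the following; the try/except shim is an illustration only and is not part of this commit.

try:
    # Names introduced by this commit.
    from invokeai.backend.model_manager.config import (
        Checkpoint_Config_Base,
        Config_Base,
        Main_Config_Base,
    )
except ImportError:
    # Pre-rename InvokeAI versions expose the old class names.
    from invokeai.backend.model_manager.config import (
        CheckpointConfigBase as Checkpoint_Config_Base,
        MainConfigBase as Main_Config_Base,
        ModelConfigBase as Config_Base,
    )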

invokeai/app/invocations/create_gradient_mask.py

Lines changed: 2 additions & 2 deletions

@@ -21,7 +21,7 @@
 from invokeai.app.invocations.model import UNetField, VAEField
 from invokeai.app.services.shared.invocation_context import InvocationContext
 from invokeai.backend.model_manager import LoadedModel
-from invokeai.backend.model_manager.config import MainConfigBase
+from invokeai.backend.model_manager.config import Main_Config_Base
 from invokeai.backend.model_manager.taxonomy import ModelVariantType
 from invokeai.backend.stable_diffusion.diffusers_pipeline import image_resized_to_grid_as_tensor

@@ -182,7 +182,7 @@ def invoke(self, context: InvocationContext) -> GradientMaskOutput:
         if self.unet is not None and self.vae is not None and self.image is not None:
             # all three fields must be present at the same time
             main_model_config = context.models.get_config(self.unet.unet.key)
-            assert isinstance(main_model_config, MainConfigBase)
+            assert isinstance(main_model_config, Main_Config_Base)
             if main_model_config.variant is ModelVariantType.Inpaint:
                 mask = dilated_mask_tensor
                 vae_info: LoadedModel = context.models.load(self.vae.vae)
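
The assert on the renamed Main_Config_Base performs runtime type narrowing: context.models.get_config returns the broad AnyModelConfig union, and only main-model configs carry the variant field the next line inspects. A minimal sketch of the same check in isolation, assuming the imports behave as in the hunk above:

from invokeai.backend.model_manager.config import AnyModelConfig, Main_Config_Base
from invokeai.backend.model_manager.taxonomy import ModelVariantType


def is_inpaint_main_model(config: AnyModelConfig) -> bool:
    # Non-main configs do not expose `variant`, so narrow the union first.
    if not isinstance(config, Main_Config_Base):
        return False
    return config.variant is ModelVariantType.Inpaint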

invokeai/app/invocations/flux_model_loader.py

Lines changed: 2 additions & 2 deletions

@@ -15,7 +15,7 @@
 )
 from invokeai.backend.flux.util import get_flux_max_seq_length
 from invokeai.backend.model_manager.config import (
-    CheckpointConfigBase,
+    Checkpoint_Config_Base,
 )
 from invokeai.backend.model_manager.taxonomy import BaseModelType, ModelType, SubModelType

@@ -87,7 +87,7 @@ def invoke(self, context: InvocationContext) -> FluxModelLoaderOutput:
         t5_encoder = preprocess_t5_encoder_model_identifier(self.t5_encoder_model)

         transformer_config = context.models.get_config(transformer)
-        assert isinstance(transformer_config, CheckpointConfigBase)
+        assert isinstance(transformer_config, Checkpoint_Config_Base)

         return FluxModelLoaderOutput(
             transformer=TransformerField(transformer=transformer, loras=[]),
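
Here the same pattern narrows the transformer config to Checkpoint_Config_Base before it is used. If a bare assert is undesirable, an equivalent way to express the narrowing for static checkers would be a TypeGuard helper; this is a sketch of an alternative, not what the commit does:

from typing import TypeGuard

from invokeai.backend.model_manager.config import AnyModelConfig, Checkpoint_Config_Base


def is_checkpoint_config(config: AnyModelConfig) -> TypeGuard[Checkpoint_Config_Base]:
    # A True result narrows `config` to Checkpoint_Config_Base at the call site (Python 3.10+).
    return isinstance(config, Checkpoint_Config_Base)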

invokeai/app/services/model_install/model_install_default.py

Lines changed: 2 additions & 2 deletions

@@ -37,7 +37,7 @@
 from invokeai.app.services.model_records.model_records_base import ModelRecordChanges
 from invokeai.backend.model_manager.config import (
     AnyModelConfig,
-    CheckpointConfigBase,
+    Checkpoint_Config_Base,
     InvalidModelConfigException,
     ModelConfigFactory,
 )

@@ -625,7 +625,7 @@ def _register(

         info.path = model_path.as_posix()

-        if isinstance(info, CheckpointConfigBase) and info.config_path is not None:
+        if isinstance(info, Checkpoint_Config_Base) and info.config_path is not None:
             # Checkpoints have a config file needed for conversion. Same handling as the model weights - if it's in the
             # invoke-managed legacy config dir, we use a relative path.
             legacy_config_path = self.app_config.legacy_conf_path / info.config_path
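
The comment in this hunk describes how checkpoint config files are stored: if the config lives inside the invoke-managed legacy config directory, only a relative path is kept. A self-contained sketch of that relativization, making no assumptions about the rest of _register:

from pathlib import Path


def relativize_legacy_config(config_path: Path, legacy_conf_dir: Path) -> Path:
    """Keep a relative path for configs inside the managed legacy dir, else the full path."""
    resolved = config_path.resolve()
    managed = legacy_conf_dir.resolve()
    if resolved.is_relative_to(managed):  # pathlib, Python 3.9+
        return resolved.relative_to(managed)
    return resolved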

invokeai/app/services/shared/invocation_context.py

Lines changed: 2 additions & 2 deletions

@@ -21,7 +21,7 @@
 from invokeai.app.util.step_callback import diffusion_step_callback
 from invokeai.backend.model_manager.config import (
     AnyModelConfig,
-    ModelConfigBase,
+    Config_Base,
 )
 from invokeai.backend.model_manager.load.load_base import LoadedModel, LoadedModelWithoutConfig
 from invokeai.backend.model_manager.taxonomy import AnyModel, BaseModelType, ModelFormat, ModelType, SubModelType

@@ -558,7 +558,7 @@ def get_absolute_path(self, config_or_path: AnyModelConfig | Path | str) -> Path
             The absolute path to the model.
         """

-        model_path = Path(config_or_path.path) if isinstance(config_or_path, ModelConfigBase) else Path(config_or_path)
+        model_path = Path(config_or_path.path) if isinstance(config_or_path, Config_Base) else Path(config_or_path)

         if model_path.is_absolute():
             return model_path.resolve()
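
The changed line is the core of get_absolute_path: anything deriving from Config_Base carries its location in .path, while plain strings and Path objects are used as-is. A standalone sketch of that dispatch, with a stand-in dataclass in place of the real Config_Base and an assumed models_dir fallback for relative paths (that branch is not shown in the hunk):

from dataclasses import dataclass
from pathlib import Path


@dataclass
class Config_Base:  # stand-in for invokeai.backend.model_manager.config.Config_Base
    path: str


def get_absolute_path(config_or_path: Config_Base | Path | str, models_dir: Path) -> Path:
    # Config objects carry their path; strings and Paths are taken verbatim.
    model_path = Path(config_or_path.path) if isinstance(config_or_path, Config_Base) else Path(config_or_path)
    if model_path.is_absolute():
        return model_path.resolve()
    # Assumed behaviour for relative paths: resolve against the models directory.
    return (models_dir / model_path).resolve()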

invokeai/backend/model_manager/__init__.py

Lines changed: 2 additions & 2 deletions

@@ -3,7 +3,7 @@
 from invokeai.backend.model_manager.config import (
     AnyModelConfig,
     InvalidModelConfigException,
-    ModelConfigBase,
+    Config_Base,
     ModelConfigFactory,
 )
 from invokeai.backend.model_manager.legacy_probe import ModelProbe

@@ -30,7 +30,7 @@
     "ModelConfigFactory",
     "ModelProbe",
     "ModelSearch",
-    "ModelConfigBase",
+    "Config_Base",
     "AnyModel",
     "AnyVariant",
     "BaseModelType",
