fixes to automatic updating and vae conversions

This PR makes the following minor fixes to the 2.3 branch:

1. The invokeai-update script will now recognize when the user
previously installed xformers and modify the pip install command
so as to include xformers as an extra that needs to be updated.

2. In addition to looking for instances in which a user has entered a
VAE ckpt into the "vae" field directly, it also handles the case in
which the user entered a ckpt into the path field. These two cases
now work:

   vae: models/ldm/stable-diffusion-1/vae-ft-mse-840000-ema-pruned.ckpt

and

   vae:
      path: models/ldm/stable-diffusion-1/vae-ft-mse-840000-ema-pruned.ckpt

3. If a 32-bit checkpoint VAE is encountered and the user is running at half precision,
the VAE is now converted to 16 bits on the fly.
This commit is contained in:
Lincoln Stein
2023-05-21 19:11:43 -04:00
parent 0ce628b22e
commit 2e21e5b8f3
3 changed files with 25 additions and 9 deletions

View File

@@ -1,2 +1,3 @@
__version__='2.3.5.post1'
__version__='2.3.5.post2'

View File

@@ -6,6 +6,7 @@ import os
import platform
import psutil
import requests
import pkg_resources
from rich import box, print
from rich.console import Console, group
from rich.panel import Panel
@@ -72,6 +73,15 @@ def welcome(versions: dict):
)
console.line()
def get_extras():
    """Return the pip "extras" specifier to append to the invokeai requirement.

    If the user previously installed xformers, return '[xformers]' so that the
    upgrade command reinstalls/updates xformers alongside InvokeAI; otherwise
    return the empty string.
    """
    # importlib.metadata is the stdlib replacement for the deprecated
    # pkg_resources API; a function-local import keeps module import
    # time unchanged.
    from importlib.metadata import PackageNotFoundError, version

    try:
        version('xformers')  # raises PackageNotFoundError if not installed
        return '[xformers]'
    except PackageNotFoundError:
        return ''
def main():
versions = get_versions()
if invokeai_is_running():
@@ -94,13 +104,15 @@ def main():
elif choice=='4':
branch = Prompt.ask('Enter an InvokeAI branch name')
extras = get_extras()
print(f':crossed_fingers: Upgrading to [yellow]{tag if tag else release}[/yellow]')
if release:
cmd = f'pip install {INVOKE_AI_SRC}/{release}.zip --use-pep517 --upgrade'
cmd = f"pip install 'invokeai{extras} @ {INVOKE_AI_SRC}/{release}.zip' --use-pep517 --upgrade"
elif tag:
cmd = f'pip install {INVOKE_AI_TAG}/{tag}.zip --use-pep517 --upgrade'
cmd = f"pip install 'invokeai{extras} @ {INVOKE_AI_TAG}/{tag}.zip' --use-pep517 --upgrade"
else:
cmd = f'pip install {INVOKE_AI_BRANCH}/{branch}.zip --use-pep517 --upgrade'
cmd = f"pip install 'invokeai{extras} @ {INVOKE_AI_BRANCH}/{branch}.zip' --use-pep517 --upgrade"
print('')
print('')
if os.system(cmd)==0:

View File

@@ -1229,12 +1229,16 @@ class ModelManager(object):
return vae_path
def _load_vae(self, vae_config) -> AutoencoderKL:
using_fp16 = self.precision == "float16"
dtype = torch.float16 if using_fp16 else torch.float32
# Handle the common case of a user shoving a VAE .ckpt into
# the vae field for a diffusers. We convert it into diffusers
# format and use it.
if type(vae_config) in [str,Path]:
return self.convert_vae(vae_config)
if isinstance(vae_config,(str,Path)):
return self.convert_vae(vae_config).to(dtype=dtype)
elif isinstance(vae_config,DictConfig) and (vae_path := vae_config.get('path')):
return self.convert_vae(vae_path).to(dtype=dtype)
vae_args = {}
try:
@@ -1243,7 +1247,6 @@ class ModelManager(object):
return None
if name_or_path is None:
return None
using_fp16 = self.precision == "float16"
vae_args.update(
cache_dir=global_cache_dir("hub"),
@@ -1285,7 +1288,7 @@ class ModelManager(object):
@staticmethod
def convert_vae(vae_path: Union[Path,str])->AutoencoderKL:
print(f" | A checkpoint VAE was detected. Converting to diffusers format.")
print(" | A checkpoint VAE was detected. Converting to diffusers format.")
vae_path = Path(Globals.root,vae_path).resolve()
from .ckpt_to_diffuser import (