Compare commits

...

12 Commits

Author | SHA1 | Message | Date

Lincoln Stein | ded521b019 | pass regression tests when translator package not installed | 2023-07-31 12:35:39 -04:00
Lincoln Stein | a3980cc756 | changed declaration of dummy translate class for pytest | 2023-07-31 09:04:26 -04:00
Lincoln Stein | 6f15a67592 | Merge branch 'main' into feat/translate | 2023-07-31 08:40:17 -04:00
Lincoln Stein | a597b4bfaf | add dummy ts object to pass pytests | 2023-07-31 08:28:23 -04:00
Lincoln Stein | 6ac4338f00 | blackified | 2023-07-31 08:07:36 -04:00
Alexandre Macabies | eb642653cb | Add Nix Flake for development, which uses Python virtualenv. | 2023-07-31 19:14:30 +10:00
Lincoln Stein | f4ead5e07f | fix keyerror bug that was causing merge script to crash | 2023-07-30 19:25:44 -04:00
Lincoln Stein | 6d24ca7f52 | 3.0.1post3 (#4082) | 2023-07-30 18:03:35 -04:00
    This is a relatively stable release that corrects the urgent Windows install
    and model manager problems in 3.0.1. It still has two known bugs:
    1. Many inpainting models are not loading correctly.
    2. The merge script is failing to start.
Lincoln Stein | 2164da8592 | blackify | 2023-07-30 16:25:06 -04:00
Lincoln Stein | ba817b5648 | added popup for translation service | 2023-07-30 09:14:26 -04:00
Lincoln Stein | 0c31eaee61 | blackified | 2023-07-29 21:14:18 -04:00
Lincoln Stein | e73c12cac2 | add non-English language translator node | 2023-07-29 20:21:57 -04:00
5 changed files with 162 additions and 2 deletions

flake.lock (generated, new file: +25 lines)

@@ -0,0 +1,25 @@
{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1690630721,
        "narHash": "sha256-Y04onHyBQT4Erfr2fc82dbJTfXGYrf4V0ysLUYnPOP8=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "d2b52322f35597c62abf56de91b0236746b2a03d",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "type": "indirect"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}

flake.nix (new file: +81 lines)

@@ -0,0 +1,81 @@
# Important note: this flake does not attempt to create a fully isolated, 'pure'
# Python environment for InvokeAI. Instead, it depends on local invocations of
# virtualenv/pip to install the required (binary) packages, most importantly the
# prebuilt binary pytorch packages with CUDA support.
# ML Python packages with CUDA support, like pytorch, are notoriously expensive
# to compile, so it's purposefully not what this flake does.
{
  description = "An (impure) flake to develop on InvokeAI.";

  outputs = { self, nixpkgs }:
    let
      system = "x86_64-linux";
      pkgs = import nixpkgs {
        inherit system;
        config.allowUnfree = true;
      };

      python = pkgs.python310;

      mkShell = { dir, install }:
        let
          setupScript = pkgs.writeScript "setup-invokai" ''
            # This must be sourced using 'source', not executed.
            ${python}/bin/python -m venv ${dir}
            ${dir}/bin/python -m pip install ${install}
            # ${dir}/bin/python -c 'import torch; assert(torch.cuda.is_available())'
            source ${dir}/bin/activate
          '';
        in
        pkgs.mkShell rec {
          buildInputs = with pkgs; [
            # Backend: graphics, CUDA.
            cudaPackages.cudnn
            cudaPackages.cuda_nvrtc
            cudatoolkit
            freeglut
            glib
            gperf
            procps
            libGL
            libGLU
            linuxPackages.nvidia_x11
            python
            stdenv.cc
            stdenv.cc.cc.lib
            xorg.libX11
            xorg.libXext
            xorg.libXi
            xorg.libXmu
            xorg.libXrandr
            xorg.libXv
            zlib

            # Pre-commit hooks.
            black

            # Frontend.
            yarn
            nodejs
          ];
          LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath buildInputs;
          CUDA_PATH = pkgs.cudatoolkit;
          EXTRA_LDFLAGS = "-L${pkgs.linuxPackages.nvidia_x11}/lib";
          shellHook = ''
            if [[ -f "${dir}/bin/activate" ]]; then
              source "${dir}/bin/activate"
              echo "Using Python: $(which python)"
            else
              echo "Use 'source ${setupScript}' to set up the environment."
            fi
          '';
        };
    in
    {
      devShells.${system} = rec {
        develop = mkShell {
          dir = "venv";
          install = "-e '.[xformers]' --extra-index-url https://download.pytorch.org/whl/cu118";
        };
        default = develop;
      };
    };
}
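The setup script above keeps its torch CUDA assertion commented out. A minimal sketch of that same check, run by hand inside the activated venv (an illustration only, not part of the flake):

# Hypothetical post-setup sanity check, mirroring the assertion the setup
# script leaves commented out; run it with the venv's python interpreter.
import torch

assert torch.cuda.is_available(), "CUDA-enabled torch is not usable in this venv"
print(f"Found CUDA device: {torch.cuda.get_device_name(0)}")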


@@ -0,0 +1,52 @@
# Copyright (c) 2023 Lincoln D. Stein

from typing import Literal, Union, List

from pydantic import Field

from .baseinvocation import (
    BaseInvocation,
    BaseInvocationOutput,
    InvocationContext,
    InvocationConfig,
)

# from .params import StringOutput

translate_available = False
try:
    import translators as ts

    translate_available = True
    TRANSLATORS = tuple(ts.translators_pool)
except:
    TRANSLATORS = ("google", "bing")

DEFAULT_PROMPT = "" if translate_available else "To use this node, please 'pip install --upgrade translators'"


class TranslateOutput(BaseInvocationOutput):
    """Translated string output"""

    type: Literal["translated_string_output"] = "translated_string_output"
    prompt: str = Field(default=None, description="The translated prompt string")


class TranslateInvocation(BaseInvocation):
    """Use the translators package to translate 330 languages into English prompts"""

    # fmt: off
    type: Literal["translate"] = "translate"

    # Inputs
    text: str = Field(default=DEFAULT_PROMPT, description="Prompt in any language")
    translator: Literal[TRANSLATORS] = Field(default="google", description="The translator service to use")
    # fmt: on

    # Schema customisation
    class Config(InvocationConfig):
        schema_extra = {
            "ui": {"title": "Translate", "tags": ["prompt", "translate", "translator"]},
        }

    def invoke(self, context: InvocationContext) -> TranslateOutput:
        translation: str = ts.translate_text(self.text, translator=self.translator)
        return TranslateOutput(prompt=translation)
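For context, a minimal sketch (not part of the diff) of the standalone call this node wraps when the optional translators package is installed; the dropdown choices come from ts.translators_pool, and translate_text translates into English by default:

# Hypothetical standalone use of the 'translators' package the node delegates to.
import translators as ts

result = ts.translate_text("un chat assis sur un tapis rouge", translator="google")
print(result)  # expected: an English rendering such as "a cat sitting on a red carpet"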


@@ -174,6 +174,7 @@ INIT_FILE = Path("invokeai.yaml")
 DB_FILE = Path("invokeai.db")
 LEGACY_INIT_FILE = Path("invokeai.init")
+
 class InvokeAISettings(BaseSettings):
     """
     Runtime configuration settings in which default values are
@@ -273,7 +274,7 @@ class InvokeAISettings(BaseSettings):
     @classmethod
     def _excluded(self) -> List[str]:
         # internal fields that shouldn't be exposed as command line options
-        return ["type", "initconf","cached_root"]
+        return ["type", "initconf", "cached_root"]

     @classmethod
     def _excluded_from_yaml(self) -> List[str]:
@@ -362,6 +363,7 @@ def _find_root() -> Path:
         root = Path("~/invokeai").expanduser().resolve()
     return root
+
 class InvokeAIAppConfig(InvokeAISettings):
     """
     Generate images using Stable Diffusion. Use "invokeai" to launch


@@ -320,7 +320,7 @@ class mergeModelsForm(npyscreen.FormMultiPageAction):
     def get_model_names(self, base_model: BaseModelType = None) -> List[str]:
         model_names = [
-            info["name"]
+            info["model_name"]
             for info in self.model_manager.list_models(model_type=ModelType.Main, base_model=base_model)
             if info["model_format"] == "diffusers"
         ]
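The one-key change above lines up with commit f4ead5e07f ("fix keyerror bug that was causing merge script to crash"): the model info dicts are keyed by "model_name", so the old info["name"] lookup raised KeyError. A minimal sketch of the failure mode, using a hypothetical model entry:

# Hypothetical model-info dict shaped like the entries the merge form iterates over.
info = {"model_name": "stable-diffusion-v1-5", "model_format": "diffusers"}

try:
    info["name"]                  # the old lookup: this key does not exist
except KeyError as err:
    print(f"KeyError: {err}")     # the crash described in the commit message
print(info["model_name"])         # the corrected lookup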