nousr / koi

A plug-in for Krita that enables the use of AI models for img2img generation.
MIT License

Error using colab. Image unable to be transferred back to krita with 'BadZipFile' error. #51

Open sam-greenwood opened 2 years ago

sam-greenwood commented 2 years ago

I tried running the example notebook to use Colab for the GPU compute. Setup of the server works just fine; I opened a fresh install of Krita, pasted in the address for the server, and clicked 'dream' with the default mountain landscape prompt. This produced an error on both Colab and Krita (see below).

It seems that the inference runs fine, so the Stable Diffusion code and the connection to Krita appear to work; the error occurs when the generated image is passed back to Krita. I am running Krita version 5.1.1 (AppImage) on Fedora 36 with Linux kernel 5.19. The Colab notebook is an unmodified copy of the one included in the koi repo (https://github.com/nousr/koi/blob/main/koi_colab_backend.ipynb).

I note that the Krita error message references the Miniconda Python installed on my system; could that be an issue? Any help appreciated!

Colab error message:

100% 9/9 [00:06<00:00, 2.94it/s]

ERROR:__main__:Exception on /api/img2img [POST]
Traceback (most recent call last):
  File "/usr/local/lib/python3.7/dist-packages/flask/app.py", line 2525, in wsgi_app
    response = self.full_dispatch_request()
  File "/usr/local/lib/python3.7/dist-packages/flask/app.py", line 1822, in full_dispatch_request
    rv = self.handle_user_exception(e)
  File "/usr/local/lib/python3.7/dist-packages/flask/app.py", line 1820, in full_dispatch_request
    rv = self.dispatch_request()
  File "/usr/local/lib/python3.7/dist-packages/flask/app.py", line 1796, in dispatch_request
    return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)
  File "<ipython-input-1-afcf33d6615b>", line 82, in img2img
    )["sample"][0]
  File "/usr/local/lib/python3.7/dist-packages/diffusers/utils/outputs.py", line 88, in __getitem__
    return inner_dict[k]
KeyError: 'sample'
INFO:werkzeug:127.0.0.1 - - [21/Oct/2022 13:08:02] "POST /api/img2img HTTP/1.1" 500 -

And then this error on krita:

BadZipFile
Python 3.8.1: /home/$USER/miniconda3/bin/python3
Fri Oct 21 14:08:02 2022

A problem occurred in a Python script.  Here is the sequence of
function calls leading up to the error, in the order they occurred.

 /home/$USER/.local/share/krita/pykrita/koi/koi.py in pingServer(self=<koi.koi.Koi object>)
  157         # wait for response and read image
  158         with request.urlopen(response_url, timeout=self._get_timeout()) as response:
  159             archive = ZipFile(BytesIO(response.read()))
  160             filenames = archive.namelist()
  161             for name in filenames:
archive undefined
global ZipFile = <class 'zipfile.ZipFile'>
global BytesIO = <class '_io.BytesIO'>
response = <http.client.HTTPResponse object>
response.read = <bound method HTTPResponse.read of <http.client.HTTPResponse object>>

 /tmp/.mount_krita-sL66r9/usr/lib/python3.8/zipfile.py in __init__(self=<zipfile.ZipFile [closed]>, file=<_io.BytesIO object>, mode='r', compression=0, allowZip64=True, compresslevel=None, strict_timestamps=True)
 1265         try:
 1266             if mode == 'r':
 1267                 self._RealGetContents()
 1268             elif mode in ('w', 'x'):
 1269                 # set the modified flag so central directory gets written
self = <zipfile.ZipFile [closed]>
self._RealGetContents = <bound method ZipFile._RealGetContents of <zipfile.ZipFile [closed]>>

 /tmp/.mount_krita-sL66r9/usr/lib/python3.8/zipfile.py in _RealGetContents(self=<zipfile.ZipFile [closed]>)
 1332             raise BadZipFile("File is not a zip file")
 1333         if not endrec:
 1334             raise BadZipFile("File is not a zip file")
 1335         if self.debug > 1:
 1336             print(endrec)
global BadZipFile = <class 'zipfile.BadZipFile'>
BadZipFile: File is not a zip file
    __cause__ = None
    __class__ = <class 'zipfile.BadZipFile'>
    __context__ = None
    __delattr__ = <method-wrapper '__delattr__' of BadZipFile object>
    __dict__ = {}
    __dir__ = <built-in method __dir__ of BadZipFile object>
    __doc__ = None
    __eq__ = <method-wrapper '__eq__' of BadZipFile object>
    __format__ = <built-in method __format__ of BadZipFile object>
    __ge__ = <method-wrapper '__ge__' of BadZipFile object>
    __getattribute__ = <method-wrapper '__getattribute__' of BadZipFile object>
    __gt__ = <method-wrapper '__gt__' of BadZipFile object>
    __hash__ = <method-wrapper '__hash__' of BadZipFile object>
    __init__ = <method-wrapper '__init__' of BadZipFile object>
    __init_subclass__ = <built-in method __init_subclass__ of type object>
    __le__ = <method-wrapper '__le__' of BadZipFile object>
    __lt__ = <method-wrapper '__lt__' of BadZipFile object>
    __module__ = 'zipfile'
    __ne__ = <method-wrapper '__ne__' of BadZipFile object>
    __new__ = <built-in method __new__ of type object>
    __reduce__ = <built-in method __reduce__ of BadZipFile object>
    __reduce_ex__ = <built-in method __reduce_ex__ of BadZipFile object>
    __repr__ = <method-wrapper '__repr__' of BadZipFile object>
    __setattr__ = <method-wrapper '__setattr__' of BadZipFile object>
    __setstate__ = <built-in method __setstate__ of BadZipFile object>
    __sizeof__ = <built-in method __sizeof__ of BadZipFile object>
    __str__ = <method-wrapper '__str__' of BadZipFile object>
    __subclasshook__ = <built-in method __subclasshook__ of type object>
    __suppress_context__ = False
    __traceback__ = <traceback object>
    __weakref__ = None
    args = ('File is not a zip file',)
    with_traceback = <built-in method with_traceback of BadZipFile object>

The above is a description of an error in a Python program.  Here is
the original traceback:

Traceback (most recent call last):
  File "/home/$USER/.local/share/krita/pykrita/koi/koi.py", line 159, in pingServer
    archive = ZipFile(BytesIO(response.read()))
  File "/tmp/.mount_krita-sL66r9/usr/lib/python3.8/zipfile.py", line 1267, in __init__
    self._RealGetContents()
  File "/tmp/.mount_krita-sL66r9/usr/lib/python3.8/zipfile.py", line 1334, in _RealGetContents
    raise BadZipFile("File is not a zip file")
zipfile.BadZipFile: File is not a zip file
LoganFairbairn commented 2 years ago

I'm also hitting this same error; I haven't found a solution for it yet.

nousr commented 2 years ago

Make sure you are copying the correct endpoint URL and are appending something like "/api/img2img" to it.
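For example, if the Colab cell prints a tunnel address such as https://1234-56-78-90.ngrok.io (a made-up URL), the value pasted into koi would be https://1234-56-78-90.ngrok.io/api/img2img.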

Massi361 commented 2 years ago

I am also having the same problem

Masashi3777 commented 2 years ago

Same issue for me...

sam-greenwood commented 1 year ago

> Make sure you are copying the correct endpoint URL and are appending something like "/api/img2img" to it.

I have double-checked, and I have been doing this. Sorry I can't shed any more light on what might be causing the issue.

qqmok commented 1 year ago

Hi, I don't know if this issue is still relevant, but I had the same problem and managed to solve it. The BadZipFile error occurs because there is an error in the API backend, so instead of a valid zip file an error message is passed back to Krita. In the ipynb backend (https://github.com/nousr/koi/blob/main/koi_colab_backend.ipynb?short_path=8eefaeb#L223), the call to the pipe tries to get the image from the key ["sample"], which doesn't exist in the pipe's output. According to https://huggingface.co/CompVis/stable-diffusion-v1-4, pipe(...).images[0] returns the correct output image. Changing the code accordingly should solve the issue.
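Concretely, the change would be in the notebook's img2img handler, roughly like this (a sketch only; the argument names are the ones the notebook already uses, and the only change is how the pipeline output is indexed):

with autocast("cuda"):
    result = pipe(
        init_image=img,
        prompt=prompt,
        strength=float(headers["sketch_strength"]),
        guidance_scale=float(headers["prompt_strength"]),
        num_inference_steps=int(headers["steps"]),
    )

# old: return_image = result["sample"][0]   -> raises KeyError: 'sample'
# new: the pipeline output exposes the generated images via its .images attribute
return_image = result.images[0]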

nousr commented 1 year ago

> Hi, I don't know if this issue is still relevant, but I had the same problem and managed to solve it. The BadZipFile error occurs because there is an error in the API backend, so instead of a valid zip file an error message is passed back to Krita. In the ipynb backend (https://github.com/nousr/koi/blob/main/koi_colab_backend.ipynb?short_path=8eefaeb#L223), the call to the pipe tries to get the image from the key ["sample"], which doesn't exist in the pipe's output. According to https://huggingface.co/CompVis/stable-diffusion-v1-4, pipe(...).images[0] returns the correct output image. Changing the code accordingly should solve the issue.

Hey @qqmok! Do you think you could open a PR to fix this? It would be a great help :)

reyvicio commented 1 year ago

I'm pretty much a noob at these things. Is this the change you meant? I replaced ["sample"][0] with .images[0] where marked below:

import torch
from flask import Flask, Response, request, send_file
from PIL import Image
from io import BytesIO
from torch import autocast
from diffusers import StableDiffusionImg2ImgPipeline
from click import secho
from zipfile import ZipFile

# the following line is specific to remote environments (like google colab)
from flask_ngrok import run_with_ngrok

# Load the model for use (this may take a minute or two...or three)
secho("Loading Model...", fg="yellow")

pipe = StableDiffusionImg2ImgPipeline.from_pretrained(
    "CompVis/stable-diffusion-v1-4",
    use_auth_token=True,
    revision="fp16",
    torch_dtype=torch.float16,
).to("cuda")

secho("Finished!", fg="green")

# Start setting up flask
app = Flask(__name__)

# Define a function to help us "control the randomness"
def seed_everything(seed: int):
    import random, os

    random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = True

def get_name(prompt, seed):
    return f'{prompt}-{seed}'

# Define one endpoint "/api/img2img" for us to communicate with
@app.route("/api/img2img", methods=["POST"])
def img2img():
    global pipe

    r = request
    headers = r.headers

    data = r.data
    buff = BytesIO(data)
    img = Image.open(buff).convert("RGB")

    seed = int(headers["seed"])
    prompt = headers['prompt']

    print(r.headers)

    zip_stream = BytesIO()
    with ZipFile(zip_stream, 'w') as zf:

        for index in range(int(headers['variations'])):
            variation_seed = seed + index
            seed_everything(variation_seed)

            with autocast("cuda"):
                return_image = pipe(
                    init_image=img,
                    prompt=prompt,
                    strength=float(headers["sketch_strength"]),
                    guidance_scale=float(headers["prompt_strength"]),
                    num_inference_steps=int(headers["steps"]),
                ).images[0]  # <- .images[0] here instead of ["sample"][0]

            return_bytes = BytesIO()
            return_image.save(return_bytes, format="JPEG")

            return_bytes.seek(0)
            zf.writestr(get_name(prompt, variation_seed), return_bytes.read())

    zip_stream.seek(0)

    return send_file(zip_stream, mimetype="application/zip")

run_with_ngrok(app)
app.run()

It didn't work for me, but I don't know why.

werneroi commented 1 year ago

Hello. I am new to Krita and koi. I just installed using Colab and am getting this BadZipFile error. Is this thread still active? Is there a fix for this? Any way to help would be much appreciated in advance :)

brunolm commented 1 year ago

Same error here, but that's just what you see on the "frontend". I think the actual error is this one, on the server:

ERROR:__main__:Exception on /api/img2img [POST]
Traceback (most recent call last):
  File "/usr/local/lib/python3.10/dist-packages/flask/app.py", line 2529, in wsgi_app
    response = self.full_dispatch_request()
  File "/usr/local/lib/python3.10/dist-packages/flask/app.py", line 1825, in full_dispatch_request
    rv = self.handle_user_exception(e)
  File "/usr/local/lib/python3.10/dist-packages/flask/app.py", line 1823, in full_dispatch_request
    rv = self.dispatch_request()
  File "/usr/local/lib/python3.10/dist-packages/flask/app.py", line 1799, in dispatch_request
    return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)
  File "<ipython-input-4-c3399f45cf3d>", line 75, in img2img
    strength=float(headers["sketch_strength"]),
  File "/usr/local/lib/python3.10/dist-packages/werkzeug/datastructures/headers.py", line 493, in __getitem__
    return self.environ[f"HTTP_{key}"]
KeyError: 'HTTP_SKETCH_STRENGTH'

The code causing the issue is this:

            with autocast("cuda"):
                return_image = pipe(
                    init_image=img,
                    prompt=prompt,
                    strength=float(headers["sketch_strength"]),
                    guidance_scale=float(headers["prompt_strength"]),
                    num_inference_steps=int(headers["steps"]),
                ).images[0]

I don't know how to fix it.


brunolm commented 1 year ago

Bing helped me change the script a little bit to make it work:

import torch
from flask import Flask, Response, request, send_file
from PIL import Image
from io import BytesIO
from torch import autocast
from diffusers import StableDiffusionImg2ImgPipeline
from click import secho
from zipfile import ZipFile

# the following line is specific to remote environments (like google colab)
from flask_ngrok import run_with_ngrok

# Load the model for use (this may take a minute or two...or three)
secho("Loading Model...", fg="yellow")

pipe = StableDiffusionImg2ImgPipeline.from_pretrained(
    "CompVis/stable-diffusion-v1-4", 
    use_auth_token=True,
    revision="fp16",
    torch_dtype=torch.float16,
    safety_checker=None,
    requires_safety_checker=False,
).to("cuda")

secho("Finished!", fg="green")

# Start setting up flask

app = Flask(__name__)

# Define a function to help us "control the randomness"

def seed_everything(seed: int):
    import random, os

    random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = True

def get_name(prompt, seed):
    return f'{prompt}-{seed}'

# Define one endpoint "/api/img2img" for us to communicate with
@app.route("/api/img2img", methods=["POST"])
def img2img():
    global pipe

    r = request
    headers = r.headers

    data = r.data
    buff = BytesIO(data)
    img = Image.open(buff).convert("RGB")

    seed = int(headers.get("seed", 1337))
    prompt = headers.get('prompt', 'error message')

    print(r.headers)

    zip_stream = BytesIO()
    with ZipFile(zip_stream, 'w') as zf:

        for index in range(int(headers.get('variations', 32))):
            variation_seed = seed + index
            seed_everything(variation_seed)

            secho("Loading image...", fg="yellow")
            with autocast("cuda"):
                return_image = pipe(
                    image=img,
                    prompt=prompt,
                    strength=float(headers.get("sketch_strength", 0.4)),
                    guidance_scale=float(headers.get("prompt_strength", 7.5)),
                    num_inference_steps=int(headers.get("steps", 32)),
                ).images[0]

            secho("Got Image!", fg="green")

            return_bytes = BytesIO()
            return_image.save(return_bytes, format="JPEG")

            return_bytes.seek(0)
            zf.writestr(get_name(prompt, variation_seed), return_bytes.read())

    zip_stream.seek(0)

    return send_file(zip_stream, mimetype="application/zip")

run_with_ngrok(app)
app.run()
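In case it helps anyone else debugging this: one way to check the backend without Krita in the loop is to POST a test image to the endpoint yourself and see whether a valid zip comes back. When it doesn't, the response body is usually the server's error page, which is what the Krita plugin then fails to open and reports as BadZipFile. A rough sketch (the URL is a placeholder for your tunnel address, the header names mirror the script above, and sketch.jpg is any small test image):

import sys
from io import BytesIO
from urllib import request
from urllib.error import HTTPError
from zipfile import ZipFile

URL = "https://REPLACE-ME.ngrok.io/api/img2img"  # placeholder; use the tunnel URL printed by the notebook

with open("sketch.jpg", "rb") as f:  # any small test image
    payload = f.read()

req = request.Request(
    URL,
    data=payload,
    headers={
        "prompt": "a beautiful mountain landscape",
        "seed": "1337",
        "variations": "1",
        "sketch_strength": "0.4",
        "prompt_strength": "7.5",
        "steps": "32",
    },
    method="POST",
)

try:
    with request.urlopen(req, timeout=300) as resp:
        body = resp.read()
except HTTPError as e:
    # a 500 from the backend still has a readable body (Flask's error page)
    body = e.read()
    print("Server returned HTTP", e.code)

try:
    print("Got a zip with entries:", ZipFile(BytesIO(body)).namelist())
except Exception:
    # not a zip: this is the kind of response that shows up as BadZipFile in Krita
    print("Not a zip. First bytes of the response:", body[:200])
    sys.exit(1)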