I didn't change anything — I just launched the app and pressed Generate Image.
```
Code block collapsed, click to show
```
```bat
Henlooo
python index.py --no-use_tuned --import_mlir --device_allocator=caching
(shark.venv) G:\Downloads\Shark\SHARK\apps\stable_diffusion\web>python index.py
shark_tank local cache is located at C:\Users\Acrivec\.local/shark_tank/ . You may change this by setting the --local_tank_cache= flag
Clearing .mlir temporary files from a prior run. This may take some time...
Clearing .mlir temporary files took 0.0000 seconds.
gradio temporary image cache located at G:\Downloads\Shark\SHARK\apps\stable_diffusion\web\shark_tmp/gradio. You may change this by setting the GRADIO_TEMP_DIR environment variable.
No temporary images files to clear.
G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\components\dropdown.py:163: UserWarning: The value passed into gr.Dropdown() is not in the list of choices. Please update the list of choices to include: EulerAncestralDiscrete or set allow_custom_value=True.
warnings.warn(
vulkan devices are available.
metal devices are not available.
cuda devices are not available.
rocm devices are available.
local-sync devices are available.
local-task devices are available.
G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\components\dropdown.py:163: UserWarning: The value passed into gr.Dropdown() is not in the list of choices. Please update the list of choices to include: SharkEulerDiscrete or set allow_custom_value=True.
warnings.warn(
{'cpu': ['Intel(R) Core(TM) i7-6700K CPU @ 4.00GHz => cpu-task'], 'cuda': [], 'vulkan': ['AMD Radeon RX 7900 XTX => vulkan://0'], 'rocm': ['AMD Radeon RX 7900 XTX => rocm']}
Running on local URL: http://0.0.0.0:8080
To create a public link, set `share=True` in `launch()`.
shark_tank local cache is located at C:\Users\Acrivec\.local/shark_tank/ . You may change this by setting the --local_tank_cache= flag
Found device AMD Radeon RX 7900 XTX. Using target triple rdna3-7900-windows.
Tuned models are currently not supported for this setting.
Traceback (most recent call last):
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\queueing.py", line 456, in call_prediction
output = await route_utils.call_process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\route_utils.py", line 232, in call_process_api
output = await app.get_blocks().process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\blocks.py", line 1522, in process_api
result = await self.call_function(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\blocks.py", line 1156, in call_function
prediction = await utils.async_iteration(iterator)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 515, in async_iteration
return await iterator.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 508, in __anext__
return await anyio.to_thread.run_sync(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\to_thread.py", line 33, in run_sync
return await get_asynclib().run_sync_in_worker_thread(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread
return await future
^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run
result = context.run(func, *args)
^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 491, in run_sync_iterator_async
return next(iterator)
^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 662, in gen_wrapper
yield from f(*args, **kwargs)
File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\web\ui\txt2img_ui.py", line 161, in txt2img_inf
global_obj.set_schedulers(get_schedulers(model_id))
^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\src\schedulers\sd_schedulers.py", line 100, in get_schedulers
] = SharkEulerDiscreteScheduler.from_pretrained(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\schedulers\scheduling_utils.py", line 147, in from_pretrained
return cls.from_config(config, return_unused_kwargs=return_unused_kwargs, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 254, in from_config
model = cls(**init_dict)
^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 644, in inner_init init(self, *args, **init_kwargs)
File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\src\schedulers\shark_eulerdiscrete.py", line 35, in __init__
super().__init__(
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 644, in inner_init init(self, *args, **init_kwargs)
TypeError: EulerDiscreteScheduler.__init__() takes from 1 to 11 positional arguments but 14 were given
Traceback (most recent call last):
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\queueing.py", line 456, in call_prediction
output = await route_utils.call_process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\route_utils.py", line 232, in call_process_api
output = await app.get_blocks().process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\blocks.py", line 1522, in process_api
result = await self.call_function(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\blocks.py", line 1156, in call_function
prediction = await utils.async_iteration(iterator)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 515, in async_iteration
return await iterator.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 508, in __anext__
return await anyio.to_thread.run_sync(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\to_thread.py", line 33, in run_sync
return await get_asynclib().run_sync_in_worker_thread(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread
return await future
^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run
result = context.run(func, *args)
^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 491, in run_sync_iterator_async
return next(iterator)
^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 662, in gen_wrapper
yield from f(*args, **kwargs)
File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\web\ui\txt2img_ui.py", line 161, in txt2img_inf
global_obj.set_schedulers(get_schedulers(model_id))
^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\src\schedulers\sd_schedulers.py", line 100, in get_schedulers
] = SharkEulerDiscreteScheduler.from_pretrained(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\schedulers\scheduling_utils.py", line 147, in from_pretrained
return cls.from_config(config, return_unused_kwargs=return_unused_kwargs, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 254, in from_config
model = cls(**init_dict)
^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 644, in inner_init init(self, *args, **init_kwargs)
File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\src\schedulers\shark_eulerdiscrete.py", line 35, in __init__
super().__init__(
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 644, in inner_init init(self, *args, **init_kwargs)
TypeError: EulerDiscreteScheduler.__init__() takes from 1 to 11 positional arguments but 14 were given
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\queueing.py", line 501, in process_events
response = await self.call_prediction(awake_events, batch)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\queueing.py", line 465, in call_prediction
raise Exception(str(error) if show_error else None) from error
Exception: None
```
Interestingly, changing the scheduler to DDIM still produces the same error:
TypeError: EulerDiscreteScheduler.__init__() takes from 1 to 11 positional arguments but 14 were given
Didn't change anything, just launched and pressed Generate Image
``` Code block collapsed, click to show ```
```bat Henlooo python index.py --no-use_tuned --import_mlir --device_allocator=caching (shark.venv) G:\Downloads\Shark\SHARK\apps\stable_diffusion\web>python index.py shark_tank local cache is located at C:\Users\Acrivec\.local/shark_tank/ . You may change this by setting the --local_tank_cache= flag Clearing .mlir temporary files from a prior run. This may take some time... Clearing .mlir temporary files took 0.0000 seconds. gradio temporary image cache located at G:\Downloads\Shark\SHARK\apps\stable_diffusion\web\shark_tmp/gradio. You may change this by setting the GRADIO_TEMP_DIR environment variable. No temporary images files to clear. G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\components\dropdown.py:163: UserWarning: The value passed into gr.Dropdown() is not in the list of choices. Please update the list of choices to include: EulerAncestralDiscrete or set allow_custom_value=True. warnings.warn( vulkan devices are available. metal devices are not available. cuda devices are not available. rocm devices are available. local-sync devices are available. local-task devices are available. G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\components\dropdown.py:163: UserWarning: The value passed into gr.Dropdown() is not in the list of choices. Please update the list of choices to include: SharkEulerDiscrete or set allow_custom_value=True. warnings.warn( {'cpu': ['Intel(R) Core(TM) i7-6700K CPU @ 4.00GHz => cpu-task'], 'cuda': [], 'vulkan': ['AMD Radeon RX 7900 XTX => vulkan://0'], 'rocm': ['AMD Radeon RX 7900 XTX => rocm']} Running on local URL: http://0.0.0.0:8080 To create a public link, set `share=True` in `launch()`. shark_tank local cache is located at C:\Users\Acrivec\.local/shark_tank/ . You may change this by setting the --local_tank_cache= flag Found device AMD Radeon RX 7900 XTX. Using target triple rdna3-7900-windows. Tuned models are currently not supported for this setting. 
Traceback (most recent call last): File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\queueing.py", line 456, in call_prediction output = await route_utils.call_process_api( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\route_utils.py", line 232, in call_process_api output = await app.get_blocks().process_api( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\blocks.py", line 1522, in process_api result = await self.call_function( ^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\blocks.py", line 1156, in call_function prediction = await utils.async_iteration(iterator) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 515, in async_iteration return await iterator.__anext__() ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 508, in __anext__ return await anyio.to_thread.run_sync( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\to_thread.py", line 33, in run_sync return await get_asynclib().run_sync_in_worker_thread( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread return await future ^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run result = context.run(func, *args) ^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 491, in run_sync_iterator_async return next(iterator) ^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 662, in gen_wrapper yield from f(*args, **kwargs) File 
"G:\Downloads\Shark\SHARK\apps\stable_diffusion\web\ui\txt2img_ui.py", line 161, in txt2img_inf global_obj.set_schedulers(get_schedulers(model_id)) ^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\src\schedulers\sd_schedulers.py", line 100, in get_schedulers ] = SharkEulerDiscreteScheduler.from_pretrained( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\schedulers\scheduling_utils.py", line 147, in from_pretrained return cls.from_config(config, return_unused_kwargs=return_unused_kwargs, **kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 254, in from_config model = cls(**init_dict) ^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 644, in inner_init init(self, *args, **init_kwargs) File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\src\schedulers\shark_eulerdiscrete.py", line 35, in __init__ super().__init__( File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 644, in inner_init init(self, *args, **init_kwargs) TypeError: EulerDiscreteScheduler.__init__() takes from 1 to 11 positional arguments but 14 were given Traceback (most recent call last): File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\queueing.py", line 456, in call_prediction output = await route_utils.call_process_api( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\route_utils.py", line 232, in call_process_api output = await app.get_blocks().process_api( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\blocks.py", line 1522, in process_api result = await self.call_function( ^^^^^^^^^^^^^^^^^^^^^^^^^ File 
"G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\blocks.py", line 1156, in call_function prediction = await utils.async_iteration(iterator) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 515, in async_iteration return await iterator.__anext__() ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 508, in __anext__ return await anyio.to_thread.run_sync( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\to_thread.py", line 33, in run_sync return await get_asynclib().run_sync_in_worker_thread( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread return await future ^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run result = context.run(func, *args) ^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 491, in run_sync_iterator_async return next(iterator) ^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\utils.py", line 662, in gen_wrapper yield from f(*args, **kwargs) File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\web\ui\txt2img_ui.py", line 161, in txt2img_inf global_obj.set_schedulers(get_schedulers(model_id)) ^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\src\schedulers\sd_schedulers.py", line 100, in get_schedulers ] = SharkEulerDiscreteScheduler.from_pretrained( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\schedulers\scheduling_utils.py", line 147, in from_pretrained return cls.from_config(config, return_unused_kwargs=return_unused_kwargs, **kwargs) 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 254, in from_config model = cls(**init_dict) ^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 644, in inner_init init(self, *args, **init_kwargs) File "G:\Downloads\Shark\SHARK\apps\stable_diffusion\src\schedulers\shark_eulerdiscrete.py", line 35, in __init__ super().__init__( File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\diffusers\configuration_utils.py", line 644, in inner_init init(self, *args, **init_kwargs) TypeError: EulerDiscreteScheduler.__init__() takes from 1 to 11 positional arguments but 14 were given The above exception was the direct cause of the following exception: Traceback (most recent call last): File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\queueing.py", line 501, in process_events response = await self.call_prediction(awake_events, batch) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "G:\Downloads\Shark\SHARK\shark.venv\Lib\site-packages\gradio\queueing.py", line 465, in call_prediction raise Exception(str(error) if show_error else None) from error Exception: None ```