Traceback (most recent call last):
File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\routes.py", line 488, in run_predict
output = await app.get_blocks().process_api(
File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\blocks.py", line 1431, in process_api
result = await self.call_function(
File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\blocks.py", line 1103, in call_function
prediction = await anyio.to_thread.run_sync(
File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\to_thread.py", line 33, in run_sync
return await get_asynclib().run_sync_in_worker_thread(
File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread
return await future
File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run
result = context.run(func, *args)
File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\utils.py", line 707, in wrapper
response = f(*args, **kwargs)
File "W:\stable-diffusion-webui\extensions\sd-webui-lcm\scripts\main.py", line 291, in generate_v2v
result = pipe(
File "W:\stable-diffusion-webui\venv\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "W:\stable-diffusion-webui\extensions\sd-webui-lcm\lcm\lcm_i2i_pipeline.py", line 305, in __call__
self.scheduler.set_timesteps(strength, num_inference_steps, original_inference_steps)
File "W:\stable-diffusion-webui\venv\lib\site-packages\diffusers\schedulers\scheduling_lcm.py", line 458, in set_timesteps
inference_indices = np.linspace(0, len(lcm_origin_timesteps), num=num_inference_steps, endpoint=False)
File "<__array_function__ internals>", line 180, in linspace
File "W:\stable-diffusion-webui\venv\lib\site-packages\numpy\core\function_base.py", line 120, in linspace
num = operator.index(num)
TypeError: 'float' object cannot be interpreted as an integer
Loading pipeline components...: 100%|█████████████████████████████████████████████████| 6/6 [00:00<00:00, 7.68steps/s]
Traceback (most recent call last):
File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\routes.py", line 488, in run_predict
output = await app.get_blocks().process_api(
File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\blocks.py", line 1431, in process_api
result = await self.call_function(
File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\blocks.py", line 1103, in call_function
prediction = await anyio.to_thread.run_sync(
File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\to_thread.py", line 33, in run_sync
return await get_asynclib().run_sync_in_worker_thread(
File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread
return await future
File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run
result = context.run(func, *args)
File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\utils.py", line 707, in wrapper
response = f(*args, **kwargs)
File "W:\stable-diffusion-webui\extensions\sd-webui-lcm\scripts\main.py", line 291, in generate_v2v
result = pipe(
File "W:\stable-diffusion-webui\venv\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "W:\stable-diffusion-webui\extensions\sd-webui-lcm\lcm\lcm_i2i_pipeline.py", line 305, in __call__
self.scheduler.set_timesteps(strength, num_inference_steps, original_inference_steps)
File "W:\stable-diffusion-webui\venv\lib\site-packages\diffusers\schedulers\scheduling_lcm.py", line 458, in set_timesteps
inference_indices = np.linspace(0, len(lcm_origin_timesteps), num=num_inference_steps, endpoint=False)
File "<__array_function__ internals>", line 180, in linspace
File "W:\stable-diffusion-webui\venv\lib\site-packages\numpy\core\function_base.py", line 120, in linspace
num = operator.index(num)
TypeError: 'float' object cannot be interpreted as an integer
Traceback (most recent call last): File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\routes.py", line 488, in run_predict output = await app.get_blocks().process_api( File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\blocks.py", line 1431, in process_api result = await self.call_function( File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\blocks.py", line 1103, in call_function prediction = await anyio.to_thread.run_sync( File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\to_thread.py", line 33, in run_sync return await get_asynclib().run_sync_in_worker_thread( File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread return await future File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run result = context.run(func, *args) File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\utils.py", line 707, in wrapper response = f(*args, **kwargs) File "W:\stable-diffusion-webui\extensions\sd-webui-lcm\scripts\main.py", line 291, in generate_v2v result = pipe( File "W:\stable-diffusion-webui\venv\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context return func(*args, **kwargs) File "W:\stable-diffusion-webui\extensions\sd-webui-lcm\lcm\lcm_i2i_pipeline.py", line 305, in __call__ self.scheduler.set_timesteps(strength, num_inference_steps, original_inference_steps) File "W:\stable-diffusion-webui\venv\lib\site-packages\diffusers\schedulers\scheduling_lcm.py", line 458, in set_timesteps inference_indices = np.linspace(0, len(lcm_origin_timesteps), num=num_inference_steps, endpoint=False) File "<__array_function__ internals>", line 180, in linspace File "W:\stable-diffusion-webui\venv\lib\site-packages\numpy\core\function_base.py", line 120, in linspace num = operator.index(num) TypeError: 'float' object cannot be interpreted as an integer Loading pipeline components...: 
100%|█████████████████████████████████████████████████| 6/6 [00:00<00:00, 7.68steps/s] Traceback (most recent call last): File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\routes.py", line 488, in run_predict output = await app.get_blocks().process_api( File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\blocks.py", line 1431, in process_api result = await self.call_function( File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\blocks.py", line 1103, in call_function prediction = await anyio.to_thread.run_sync( File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\to_thread.py", line 33, in run_sync return await get_asynclib().run_sync_in_worker_thread( File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\_backends\_asyncio.py", line 877, in run_sync_in_worker_thread return await future File "W:\stable-diffusion-webui\venv\lib\site-packages\anyio\_backends\_asyncio.py", line 807, in run result = context.run(func, *args) File "W:\stable-diffusion-webui\venv\lib\site-packages\gradio\utils.py", line 707, in wrapper response = f(*args, **kwargs) File "W:\stable-diffusion-webui\extensions\sd-webui-lcm\scripts\main.py", line 291, in generate_v2v result = pipe( File "W:\stable-diffusion-webui\venv\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context return func(*args, **kwargs) File "W:\stable-diffusion-webui\extensions\sd-webui-lcm\lcm\lcm_i2i_pipeline.py", line 305, in __call__ self.scheduler.set_timesteps(strength, num_inference_steps, original_inference_steps) File "W:\stable-diffusion-webui\venv\lib\site-packages\diffusers\schedulers\scheduling_lcm.py", line 458, in set_timesteps inference_indices = np.linspace(0, len(lcm_origin_timesteps), num=num_inference_steps, endpoint=False) File "<__array_function__ internals>", line 180, in linspace File "W:\stable-diffusion-webui\venv\lib\site-packages\numpy\core\function_base.py", line 120, in linspace num = operator.index(num) TypeError: 'float' object cannot be interpreted as an integer