Closed the-dream-machine closed 2 months ago
It's included in the [default]
of pip install -e ".[default]"
, so no need to install it separately.
And it'll be imported by from comfy_script.runtime.nodes import *
like all other nodes.
Sorry, I'm new to Python, and I'm trying to deploy ComfyScript inside a container to run as a serverless function. I can't import wildcards in the serverless function, so I was importing all nodes individually. Here is my code:
import modal
import os
import requests
import io
import subprocess
app = modal.App("pony-diffusion")
image = (
modal.Image.debian_slim(python_version="3.12.5")
.apt_install("git", "libglib2.0-0", "libsm6", "libxrender1", "libxext6", "ffmpeg", "libgl1")
.pip_install("torch==2.4.0+cu121", "torchvision", extra_options="--index-url https://download.pytorch.org/whl/cu121")
.pip_install("xformers==0.0.27.post2")
.pip_install("git+https://github.com/hiddenswitch/ComfyUI.git", extra_options="--no-build-isolation")
.run_commands("comfyui --create-directories")
.pip_install("comfy-script[default]", extra_options="--upgrade")
)
@app.cls(gpu="T4", container_idle_timeout=120, image=image)
class Model:
@modal.enter()
def enter(self):
print("✅ Entering container...")
from comfy_script.runtime import load
load("comfyui")
@modal.exit()
def exit(self):
print("🧨 Exiting container...")
@modal.method()
def generate_image(self, prompt:str):
print("🎨 Generating image...")
# Cannot import * here
from comfy_script.runtime import Workflow
from comfy_script.runtime.nodes import CheckpointLoaderSimple, CLIPTextEncode, EmptyLatentImage, KSampler, VAEDecode, SaveImage, CivitAICheckpointLoader
with Workflow(wait=True):
model, clip, vae = CivitAICheckpointLoader('https://civitai.com/models/101055?modelVersionId=128078')
conditioning = CLIPTextEncode('beautiful scenery nature glass bottle landscape, , purple galaxy bottle,', clip)
conditioning2 = CLIPTextEncode('text, watermark', clip)
latent = EmptyLatentImage(512, 512, 1)
latent = KSampler(model, 156680208700286, 20, 8, 'euler', 'normal', conditioning, conditioning2, latent, 1)
image = VAEDecode(latent, vae)
SaveImage(image, 'ComfyUI')
@app.local_entrypoint()
def main(prompt: str):
Model().generate_image.remote(prompt)
I get the error:
ImportError: cannot import name 'CivitAICheckpointLoader' from 'comfy_script.runtime.nodes' (/usr/local/lib/python3.12/site-packages/comfy_script/runtime/nodes.py)
Fixed in the latest commit. Thanks for your report. Could you use pip_install("comfy-script[default] @ git+https://github.com/Chaoses-Ib/ComfyScript.git")
to test if it works?
The CivitAICheckpointLoader
node is now being imported, but I'm getting new errors when
load("comfyui")
is called:
Starting server
To see the GUI go to: http://127.0.0.1:8188
Ib Custom Nodes: Loaded
Nodes: 294
ComfyScript: Failed to load node StringEnumRequestParameter
Traceback (most recent call last):
File "/usr/local/lib/python3.12/site-packages/comfy_script/runtime/nodes.py", line 20, in load
fact.add_node(node_info)
File "/usr/local/lib/python3.12/site-packages/comfy_script/runtime/factory.py", line 435, in add_node
output_types = [type_and_hint(type, name, output=True)[0] for type, name in output_with_name]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/comfy_script/runtime/factory.py", line 216, in type_and_hint
id = astutil.str_to_raw_id(name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/comfy_script/astutil.py", line 27, in str_to_raw_id
raise TypeError(f'Expected str, got {type(s)}: {s}')
TypeError: Expected str, got <class 'list'>: []
🎨 Generating image...
Queue remaining: 1
[CivitAI] Downloading `sdXL_v10VAEFix.safetensors` from `https://civitai.com/api/download/models/128078`
An error occurred while executing a workflow
Traceback (most recent call last):
File "/usr/local/lib/python3.12/site-packages/comfy/cmd/execution.py", line 371, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/comfy/cmd/execution.py", line 242, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/contextlib.py", line 81, in inner
return func(*args, **kwds)
^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/comfy/cmd/execution.py", line 218, in map_node_over_list
process_inputs(input_dict, i)
File "/usr/local/lib/python3.12/site-packages/comfy/cmd/execution.py", line 207, in process_inputs
results.append(getattr(obj, func)(**inputs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/civitai_comfy_nodes/civitai_checkpoint_loader.py", line 77, in load_checkpoint
if not civitai_model.download():
^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/civitai_comfy_nodes/CivitAI_Model.py", line 292, in download
save_path = os.path.join(self.model_path, self.name) # Assume default comfy folder, unless we take user input on extra paths
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen posixpath>", line 76, in join
TypeError: expected str, bytes or os.PathLike object, not NoneType
Traceback (most recent call last):
File "/usr/local/lib/python3.12/site-packages/comfy/cmd/execution.py", line 371, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/comfy/cmd/execution.py", line 242, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/contextlib.py", line 81, in inner
return func(*args, **kwds)
^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/comfy/cmd/execution.py", line 218, in map_node_over_list
process_inputs(input_dict, i)
File "/usr/local/lib/python3.12/site-packages/comfy/cmd/execution.py", line 207, in process_inputs
results.append(getattr(obj, func)(**inputs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/civitai_comfy_nodes/civitai_checkpoint_loader.py", line 77, in load_checkpoint
if not civitai_model.download():
^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/civitai_comfy_nodes/CivitAI_Model.py", line 292, in download
save_path = os.path.join(self.model_path, self.name) # Assume default comfy folder, unless we take user input on extra paths
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen posixpath>", line 76, in join
TypeError: expected str, bytes or os.PathLike object, not NoneType
Queue remaining: 0 Queue remaining: 0
The first one is because comfyui package adds a node with an unusual/wrong config. The official ComfyUI doesn't have it and this error doesn't affect using other nodes, so I'm probably not going to debug it until someone really needs to use it.
The second one works on my side (with a different model for convenience):
Maybe it's caused by the same problem that caused #69? Using the official ComfyUI may have different results.
I was able to resolve this issue and #69 by using the official comfy-cli to install ComfyUI. Now everything works! 🎉
Here is my code in case anyone runs into the same issue while trying to run this in a container:
import subprocess
import modal
image = (
modal.Image.debian_slim(python_version="3.12.5")
.apt_install("git")
.pip_install("comfy-cli==1.1.6")
# use comfy-cli to install the ComfyUI repo and its dependencies
.run_commands("comfy --skip-prompt install --nvidia")
# download all models and custom nodes required in your workflow
.run_commands(
"comfy --skip-prompt model download --url https://civitai.com/api/download/models/290640 --relative-path models/checkpoints"
)
.run_commands(
"cd /root/comfy/ComfyUI/custom_nodes && git clone https://github.com/Chaoses-Ib/ComfyScript.git",
"cd /root/comfy/ComfyUI/custom_nodes/ComfyScript && python -m pip install -e '.[default]'",
)
)
app = modal.App("pony_diffusion_2")
# Optional: serve the UI
@app.function(
allow_concurrent_inputs=10,
concurrency_limit=1,
container_idle_timeout=30,
timeout=1800,
gpu="T4",
)
@modal.web_server(8000, startup_timeout=60)
def ui():
_web_server = subprocess.Popen("comfy launch -- --listen 0.0.0.0 --port 8000", shell=True)
@app.cls(gpu="T4", container_idle_timeout=120, image=image)
class Model:
@modal.build()
def build(self):
print("🛠️ Building container...")
@modal.enter()
def enter(self):
print("✅ Entering container...")
from comfy_script.runtime import load
load()
@modal.exit()
def exit(self):
print("🧨 Exiting container...")
@modal.method()
def generate_image(self, prompt:str):
print("🎨 Generating image...")
from comfy_script.runtime import Workflow
from comfy_script.runtime.nodes import CheckpointLoaderSimple, CLIPTextEncode, EmptyLatentImage, KSampler, VAEDecode, SaveImage, CivitAICheckpointLoader
with Workflow(wait=True):
model, clip, vae = CivitAICheckpointLoader('https://civitai.com/models/101055?modelVersionId=128078')
# model, clip, vae = CheckpointLoaderSimple("ponyDiffusionV6XL_v6StartWithThisOne.safetensors")
conditioning = CLIPTextEncode('beautiful scenery nature glass bottle landscape, , purple galaxy bottle,', clip)
conditioning2 = CLIPTextEncode('text, watermark', clip)
latent = EmptyLatentImage(512, 512, 1)
latent = KSampler(model, 156680208700286, 20, 8, 'euler', 'normal', conditioning, conditioning2, latent, 1)
image = VAEDecode(latent, vae)
result = SaveImage(image, 'ComfyUI')
print("result", result)
@app.local_entrypoint()
def main(prompt: str):
Model().generate_image.remote(prompt)
Maybe you should consider adding the official comfy-cli as a 3rd installation option?
Feel free to close these issues 🙏
v0.5.1 is released. Comfy-CLI and the Modal code are added to the README. Thank you!
Great work @Chaoses-Ib. Thanks for putting together this amazing library!
How do I import the
CivitAICheckpointLoader
? Do I need to install it separately?