Open bvajk opened 1 month ago
I've narrowed down the long save time to the checkpoint being hashed so I decided to add a boolean parameter to decide whether the user wants that or not:
diff --git a/nodes.py b/nodes.py
index ac1114a..c6f0e00 100644
--- a/nodes.py
+++ b/nodes.py
@@ -184,6 +184,7 @@ class ImageSaveWithMetadata:
"modelname": (folder_paths.get_filename_list("checkpoints"),),
"sampler_name": (comfy.samplers.KSampler.SAMPLERS,),
"scheduler": (comfy.samplers.KSampler.SCHEDULERS,),
+ "include_hash": ("BOOLEAN", {"default": False})
},
"optional": {
"positive": ("STRING", {"default": 'unknown', "multiline": True}),
@@ -210,13 +211,17 @@ class ImageSaveWithMetadata:
CATEGORY = "ImageSaverTools"
def save_files(self, images, seed_value, steps, cfg, sampler_name, scheduler, positive, negative, modelname, quality_jpeg_or_webp,
- lossless_webp, width, height, counter, filename, path, extension, time_format, prompt=None, extra_pnginfo=None):
+ lossless_webp, width, height, counter, filename, path, extension, time_format, include_hash, prompt=None, extra_pnginfo=None):
filename = make_filename(filename, seed_value, modelname, counter, time_format)
path = make_pathname(path, seed_value, modelname, counter, time_format)
ckpt_path = folder_paths.get_full_path("checkpoints", modelname)
basemodelname = parse_name(modelname)
- modelhash = calculate_sha256(ckpt_path)[:10]
- comment = f"{handle_whitespace(positive)}\nNegative prompt: {handle_whitespace(negative)}\nSteps: {steps}, Sampler: {sampler_name}{f'_{scheduler}' if scheduler != 'normal' else ''}, CFG Scale: {cfg}, Seed: {seed_value}, Size: {width}x{height}, Model hash: {modelhash}, Model: {basemodelname}, Version: ComfyUI"
+ comment = ""
+ if include_hash:
+ modelhash = calculate_sha256(ckpt_path)[:10]
+ comment = f"{handle_whitespace(positive)}\nNegative prompt: {handle_whitespace(negative)}\nSteps: {steps}, Sampler: {sampler_name}{f'_{scheduler}' if scheduler != 'normal' else ''}, CFG Scale: {cfg}, Seed: {seed_value}, Size: {width}x{height}, Model hash: {modelhash}, Model: {basemodelname}, Version: ComfyUI"
+ else:
+ comment = f"{handle_whitespace(positive)}\nNegative prompt: {handle_whitespace(negative)}\nSteps: {steps}, Sampler: {sampler_name}{f'_{scheduler}' if scheduler != 'normal' else ''}, CFG Scale: {cfg}, Seed: {seed_value}, Size: {width}x{height}, Model: {basemodelname}, Version: ComfyUI"
output_path = os.path.join(self.output_dir, path)
if output_path.strip() != '':
But I've run into some issues, the node is not working anymore and the console is saying this:
Failed to validate prompt for output 40:
* (prompt):
- Value 0 smaller than min of 1: quality_jpeg_or_webp
- Failed to convert an input value to a INT value: counter, %Y-%m-%d-%H%M%S, invalid literal for int() with base 10: '%Y-%m-%d-%H%M%S'
* Save Image w/Metadata 40:
- Value 0 smaller than min of 1: quality_jpeg_or_webp
- Failed to convert an input value to a INT value: counter, %Y-%m-%d-%H%M%S, invalid literal for int() with base 10: '%Y-%m-%d-%H%M%S'
I made a fork of the repo excluding the hash part as a whole
I was mad with how much time it required to save. Thanks for the hint. If you wanna keep the hash, you can add a small cache to the hash function, so the value is only calculated the first time (until you restart the comfyui server).
# Cache of file path -> hex digest, so each checkpoint is hashed at most
# once per server run (hashing a multi-GiB checkpoint is the slow part).
hash_cache = {}

def calculate_sha256(file_path):
    """Return the SHA-256 hex digest of the file at *file_path*.

    Results are memoized in the module-level ``hash_cache`` keyed by the
    exact path string, so repeated saves with the same checkpoint skip
    the expensive re-hash entirely.
    """
    if file_path in hash_cache:
        return hash_cache[file_path]
    sha256_hash = hashlib.sha256()
    # Stream in 1 MiB chunks: avoids loading the whole file into memory,
    # and large chunks keep syscall/update overhead low on big checkpoints.
    with open(file_path, "rb") as f:
        while chunk := f.read(1024 * 1024):
            sha256_hash.update(chunk)
    hash_value = sha256_hash.hexdigest()
    hash_cache[file_path] = hash_value
    return hash_value
This 'fast' saving introduces another problem when you are saving several images with the same seed at the same time (It happens to me with 'Load prompt from file' from Inspire Pack), as the time used for the filename is the time of saving (not the time of generation) and images overwrite other images saved at the same second.
A dirty hack to avoid the problem could be to check in save_images if the file exists before writing
def save_images
...
while os.path.exists(os.path.join(output_path, filename)):
    filename = "x" + filename
I have a workflow that uses the core ComfyUI save image node, and the prompt execution time with it is 2.83 seconds once the models are loaded. With the exact same parameters and prompt, saving the image with this node brings the prompt execution time to 21.55 seconds. I just found this node today and love it — the metadata saving functionality is super useful — but it would be so much better if saving the file didn't take about 18-19 seconds. Is there a way to optimize that? I can provide example workflows if needed.