improved: Load Prompts From Dir (Inspire), `Load Prompts From File (Inspire)` - add load_cap, start_index widget

#207
ltdrdata committed Jan 28, 2025
1 parent f12b4bd commit 5aae916
Showing 4 changed files with 46 additions and 20 deletions.
8 changes: 7 additions & 1 deletion README.md
@@ -63,9 +63,15 @@ This repository offers various extension nodes for ComfyUI. Nodes here have diff
* Specify the directories located under `ComfyUI-Inspire-Pack/prompts/`
* One prompts file can have multiple prompts separated by `---`.
* e.g. `prompts/example`
* `Load Prompts From File (Inspire)`: It sequentially reads prompts from the specified file. The output it returns is ZIPPED_PROMPT.
* **NOTE**: This node provides advanced options via `Show advanced`
* load_cap, start_index

* `Load Prompts From File (Inspire)`: It sequentially reads prompts from the specified file. The output it returns is ZIPPED_PROMPT.
* Specify the file located under `ComfyUI-Inspire-Pack/prompts/`
* e.g. `prompts/example/prompt2.txt`
* **NOTE**: This node provides advanced options via `Show advanced`
* load_cap, start_index

* `Load Single Prompt From File (Inspire)`: Loads a single prompt from a file containing multiple prompts by using an index.
* The prompts file directory can be specified as `inspire_prompts` in `extra_model_paths.yaml`
* `Unzip Prompt (Inspire)`: Separate ZIPPED_PROMPT into `positive`, `negative`, and name components.
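The two advanced options boil down to plain list slicing: `start_index` picks where reading begins and `load_cap` limits how many prompts are returned. A quick illustration with ten stand-in prompts (the slicing itself is added in `inspire/prompt_support.py` below):

```python
# Ten stand-in prompts; the real nodes build this list by parsing the prompt file(s).
prompts = [f"prompt_{i}" for i in range(10)]

# load_cap=3, start_index=4 -> three prompts starting at index 4
print(prompts[4:][:3])   # ['prompt_4', 'prompt_5', 'prompt_6']

# load_cap=0 (load all) with start_index=-1 -> only the last prompt
print(prompts[-1:])      # ['prompt_9']

# defaults (load_cap=0, start_index=0) -> every prompt, in file order
print(len(prompts[0:]))  # 10
```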
2 changes: 1 addition & 1 deletion __init__.py
@@ -7,7 +7,7 @@

import importlib

version_code = [1, 11]
version_code = [1, 12]
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
print(f"### Loading: ComfyUI-Inspire-Pack ({version_str})")

54 changes: 37 additions & 17 deletions inspire/prompt_support.py
@@ -60,21 +60,23 @@ def INPUT_TYPES(cls):
},
"optional": {
"reload": ("BOOLEAN", { "default": False, "label_on": "if file changed", "label_off": "if value changed"}),
"load_cap": ("INT", {"default": 0, "min": 0, "step": 1, "advanced": True, "tooltip": "The amount of prompts to load at once:\n0: Load all\n1 or higher: Load a specified number"}),
"start_index": ("INT", {"default": 0, "min": -1, "step": 1, "advanced": True, "tooltip": "Starting index for loading prompts:\n-1: The last prompt\n0 or higher: Load from the specified index"}),
}
}

RETURN_TYPES = ("ZIPPED_PROMPT", "INT")
RETURN_NAMES = ("zipped_prompt", "count")
OUTPUT_IS_LIST = (True,)
RETURN_TYPES = ("ZIPPED_PROMPT", "INT", "INT")
RETURN_NAMES = ("zipped_prompt", "count", "remaining_count")
OUTPUT_IS_LIST = (True, False, False)

FUNCTION = "doit"

CATEGORY = "InspirePack/Prompt"

@staticmethod
def IS_CHANGED(prompt_dir, reload=False):
def IS_CHANGED(prompt_dir, reload=False, load_cap=0, start_index=-1):
if not reload:
return prompt_dir
return prompt_dir, load_cap, start_index
else:
candidates = []
for d in folder_paths.get_folder_paths('inspire_prompts'):
@@ -100,10 +102,10 @@ def IS_CHANGED(prompt_dir, reload=False):
break
md5.update(chunk)

return md5.hexdigest()
return md5.hexdigest(), load_cap, start_index

@staticmethod
def doit(prompt_dir, reload=False):
def doit(prompt_dir, reload=False, load_cap=0, start_index=-1):
candidates = []
for d in folder_paths.get_folder_paths('inspire_prompts'):
candidates.append(os.path.join(d, prompt_dir))
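Folding `load_cap` and `start_index` into the value returned by `IS_CHANGED` keeps the node's change-detection signature in sync with the options that now affect its output: ComfyUI re-executes a node when that signature differs from the one recorded for the previous run. A minimal sketch of the idea (hypothetical comparison helper, not ComfyUI's actual cache code):

```python
# A node is re-run when its change-detection signature differs from the previous one.
def needs_rerun(previous_signature, current_signature):
    return previous_signature != current_signature

# Previously the signature was just the prompt path or an md5 of its files;
# after this commit the two new widget values are included as well.
prev = ("3f2a9be1", 0, -1)   # (md5_hexdigest, load_cap, start_index) from the last run
cur  = ("3f2a9be1", 3, 0)    # same files on disk, but the user changed both options

print(needs_rerun(prev, cur))  # True -> prompts get re-sliced with the new options
```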
@@ -140,7 +142,15 @@ def doit(prompt_dir, reload=False):
except Exception as e:
print(f"[ERROR] LoadPromptsFromDir: an error occurred while processing '{file_name}': {str(e)}\nNOTE: Only files with UTF-8 encoding are supported.")

return (prompts, len(prompts),)
# slicing [start_index ~ start_index + load_cap]
total_prompts = len(prompts)
prompts = prompts[start_index:]
remaining_count = 0
if load_cap > 0:
remaining_count = max(0, len(prompts) - load_cap)
prompts = prompts[:load_cap]

return prompts, total_prompts, remaining_count


class LoadPromptsFromFile:
@@ -165,26 +175,28 @@ def INPUT_TYPES(cls):
"optional": {
"text_data_opt": ("STRING", {"defaultInput": True}),
"reload": ("BOOLEAN", {"default": False, "label_on": "if file changed", "label_off": "if value changed"}),
"load_cap": ("INT", {"default": 0, "min": 0, "step": 1, "advanced": True, "tooltip": "The amount of prompts to load at once:\n0: Load all\n1 or higher: Load a specified number"}),
"start_index": ("INT", {"default": 0, "min": -1, "step": 1, "advanced": True, "tooltip": "Starting index for loading prompts:\n-1: The last prompt\n0 or higher: Load from the specified index"}),
}
}

RETURN_TYPES = ("ZIPPED_PROMPT", "INT")
RETURN_NAMES = ("zipped_prompt", "count")
OUTPUT_IS_LIST = (True,)
RETURN_TYPES = ("ZIPPED_PROMPT", "INT", "INT")
RETURN_NAMES = ("zipped_prompt", "count", "remaining_count")
OUTPUT_IS_LIST = (True, False, False)

FUNCTION = "doit"

CATEGORY = "InspirePack/Prompt"

@staticmethod
def IS_CHANGED(prompt_file, text_data_opt=None, reload=False):
def IS_CHANGED(prompt_file, text_data_opt=None, reload=False, load_cap=0, start_index=-1):
md5 = hashlib.md5()

if text_data_opt is not None:
md5.update(text_data_opt)
return md5.hexdigest()
return md5.hexdigest(), load_cap, start_index
elif not reload:
return prompt_file
return prompt_file, load_cap, start_index
else:
matched_path = None
for x in folder_paths.get_folder_paths('inspire_prompts'):
@@ -204,10 +216,10 @@ def IS_CHANGED(prompt_file, text_data_opt=None, reload=False):
break
md5.update(chunk)

return md5.hexdigest()
return md5.hexdigest(), load_cap, start_index

@staticmethod
def doit(prompt_file, text_data_opt=None, reload=False):
def doit(prompt_file, text_data_opt=None, reload=False, load_cap=0, start_index=-1):
matched_path = None
for d in folder_paths.get_folder_paths('inspire_prompts'):
matched_path = os.path.join(d, prompt_file)
@@ -247,7 +259,15 @@ def doit(prompt_file, text_data_opt=None, reload=False):
except Exception as e:
print(f"[ERROR] LoadPromptsFromFile: an error occurred while processing '{prompt_file}': {str(e)}\nNOTE: Only files with UTF-8 encoding are supported.")

return (prompts, len(prompts),)
# slicing [start_index ~ start_index + load_cap]
total_prompts = len(prompts)
prompts = prompts[start_index:]
remaining_count = 0
if load_cap > 0:
remaining_count = max(0, len(prompts) - load_cap)
prompts = prompts[:load_cap]

return prompts, total_prompts, remaining_count


class LoadSinglePromptFromFile:
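Because both loaders now also return `remaining_count`, a workflow (or a script driving one) can page through a long prompt list in fixed-size batches by advancing `start_index` until nothing remains. A self-contained sketch of that pattern in plain Python, using stand-in prompt strings rather than the node itself:

```python
def load_window(prompts, load_cap=0, start_index=0):
    """Mirrors the slicing the nodes apply and the counts they return."""
    total = len(prompts)
    window = prompts[start_index:]
    remaining = 0
    if load_cap > 0:
        remaining = max(0, len(window) - load_cap)
        window = window[:load_cap]
    return window, total, remaining

all_prompts = [f"p{i}" for i in range(7)]
start, cap = 0, 3
while True:
    batch, total, remaining = load_window(all_prompts, load_cap=cap, start_index=start)
    print(f"batch={batch} total={total} remaining={remaining}")
    if remaining == 0:
        break
    start += cap
# batch=['p0', 'p1', 'p2'] total=7 remaining=4
# batch=['p3', 'p4', 'p5'] total=7 remaining=1
# batch=['p6'] total=7 remaining=0
```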
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,7 +1,7 @@
[project]
name = "comfyui-inspire-pack"
description = "This extension provides various nodes to support Lora Block Weight, Regional Nodes, Backend Cache, Prompt Utils, List Utils, Noise(Seed) Utils, ... and the Impact Pack."
version = "1.11"
version = "1.12"
license = { file = "LICENSE" }
dependencies = ["matplotlib", "cachetools"]

