comfyui.py

import os
import urllib.request
import subprocess
import threading
import time
import json
import urllib
import uuid
import websocket
import random
import requests
import shutil
import custom_node_helpers as helpers
from cog import Path
from node import Node
from weights_downloader import WeightsDownloader
from urllib.error import URLError


class ComfyUI:
    def __init__(self, server_address):
        self.weights_downloader = WeightsDownloader()
        self.server_address = server_address

    def start_server(self, output_directory, input_directory):
        self.input_directory = input_directory
        self.output_directory = output_directory

        self.apply_helper_methods("prepare", weights_downloader=self.weights_downloader)

        start_time = time.time()
        server_thread = threading.Thread(
            target=self.run_server, args=(output_directory, input_directory)
        )
        server_thread.start()
        while not self.is_server_running():
            if time.time() - start_time > 60:
                raise TimeoutError("Server did not start within 60 seconds")
            time.sleep(0.5)

        elapsed_time = time.time() - start_time
        print(f"Server started in {elapsed_time:.2f} seconds")

    def run_server(self, output_directory, input_directory):
        command = f"python ./ComfyUI/main.py --output-directory {output_directory} --input-directory {input_directory} --disable-metadata"

        """
        We need to capture the stdout and stderr from the server process
        so that we can print the logs to the console. If we don't do this
        then at the point where ComfyUI attempts to print it will throw a
        broken pipe error. This only happens from cog v0.9.13 onwards.
        """
        server_process = subprocess.Popen(
            command,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )

        def print_stdout():
            for stdout_line in iter(server_process.stdout.readline, ""):
                print(f"[ComfyUI] {stdout_line.strip()}")

        stdout_thread = threading.Thread(target=print_stdout)
        stdout_thread.start()

        for stderr_line in iter(server_process.stderr.readline, ""):
            print(f"[ComfyUI] {stderr_line.strip()}")

    def is_server_running(self):
        try:
            with urllib.request.urlopen(
                "http://{}/history/{}".format(self.server_address, "123")
            ) as response:
                return response.status == 200
        except URLError:
            return False

    def apply_helper_methods(self, method_name, *args, **kwargs):
        # Dynamically applies a method from the helpers module with the given args.
        # Example usage: self.apply_helper_methods("add_weights", weights_to_download, node)
        for module_name in dir(helpers):
            module = getattr(helpers, module_name)
            method = getattr(module, method_name, None)
            if callable(method):
                method(*args, **kwargs)

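    # Scan every node's inputs for embedding names and weight filenames
    # (anything ending in a supported weights filetype) and download whatever
    # the workflow references before it runs.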
    def handle_weights(self, workflow, weights_to_download=None):
        if weights_to_download is None:
            weights_to_download = []

        print("Checking weights")
        embeddings = self.weights_downloader.get_weights_by_type("EMBEDDINGS")
        embedding_to_fullname = {emb.split(".")[0]: emb for emb in embeddings}
        weights_filetypes = self.weights_downloader.supported_filetypes

        for node in workflow.values():
            self.apply_helper_methods("add_weights", weights_to_download, Node(node))

            for input in node["inputs"].values():
                if isinstance(input, str):
                    if any(key in input for key in embedding_to_fullname):
                        weights_to_download.extend(
                            embedding_to_fullname[key]
                            for key in embedding_to_fullname
                            if key in input
                        )
                    elif any(input.endswith(ft) for ft in weights_filetypes):
                        weights_to_download.append(input)

        weights_to_download = list(set(weights_to_download))

        for weight in weights_to_download:
            self.weights_downloader.download_weights(weight)

        print("====================================")

    def is_image_or_video_value(self, value):
        filetypes = [".png", ".jpg", ".jpeg", ".webp", ".mp4", ".webm"]
        return isinstance(value, str) and any(
            value.lower().endswith(ft) for ft in filetypes
        )

    def handle_known_unsupported_nodes(self, workflow):
        for node in workflow.values():
            self.apply_helper_methods("check_for_unsupported_nodes", Node(node))

    def handle_inputs(self, workflow):
        print("Checking inputs")
        seen_inputs = set()
        for node in workflow.values():
            if "inputs" in node:
                for input_key, input_value in node["inputs"].items():
                    if isinstance(input_value, str) and input_value not in seen_inputs:
                        seen_inputs.add(input_value)
                        if input_value.startswith(("http://", "https://")):
                            filename = os.path.join(
                                self.input_directory, os.path.basename(input_value)
                            )
                            if not os.path.exists(filename):
                                print(f"Downloading {input_value} to {filename}")
                                try:
                                    response = requests.get(input_value)
                                    response.raise_for_status()
                                    with open(filename, "wb") as file:
                                        file.write(response.content)
                                    print(f"✅ {filename}")
                                except requests.exceptions.RequestException as e:
                                    print(f"❌ Error downloading {input_value}: {e}")

                            # The same URL may be included in a workflow more than once
                            node["inputs"][input_key] = filename
                        elif self.is_image_or_video_value(input_value):
                            filename = os.path.join(
                                self.input_directory, os.path.basename(input_value)
                            )
                            if not os.path.exists(filename):
                                print(f"❌ {filename} not provided")
                            else:
                                print(f"✅ {filename}")

        print("====================================")

    def connect(self):
        self.client_id = str(uuid.uuid4())
        self.ws = websocket.WebSocket()
        self.ws.connect(f"ws://{self.server_address}/ws?clientId={self.client_id}")

    def post_request(self, endpoint, data=None):
        url = f"http://{self.server_address}{endpoint}"
        headers = {"Content-Type": "application/json"} if data else {}
        json_data = json.dumps(data).encode("utf-8") if data else None
        req = urllib.request.Request(
            url, data=json_data, headers=headers, method="POST"
        )
        with urllib.request.urlopen(req) as response:
            if response.status != 200:
                print(f"Failed: {endpoint}, status code: {response.status}")

    # https://github.com/comfyanonymous/ComfyUI/blob/master/server.py
    def clear_queue(self):
        self.post_request("/queue", {"clear": True})
        self.post_request("/interrupt")

    def queue_prompt(self, prompt):
        try:
            # Prompt is the loaded workflow (prompt is the label ComfyUI uses)
            p = {"prompt": prompt, "client_id": self.client_id}
            data = json.dumps(p).encode("utf-8")
            req = urllib.request.Request(
                f"http://{self.server_address}/prompt?{self.client_id}", data=data
            )
            output = json.loads(urllib.request.urlopen(req).read())
            return output["prompt_id"]
        except urllib.error.HTTPError as e:
            print(f"ComfyUI error: {e.code} {e.reason}")
            raise Exception(
                "ComfyUI Error – Your workflow could not be run. This usually happens if you're trying to use an unsupported node. Check the logs for 'KeyError: ' details, and go to https://github.com/fofr/cog-comfyui to see the list of supported custom nodes."
            ) from e

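    # Called when a SafetensorError is reported: delete any weight files named
    # in the failing node's inputs so they are re-downloaded on the next run.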
    def _delete_corrupted_weights(self, error_data):
        if "current_inputs" in error_data:
            weights_to_delete = []
            weights_filetypes = self.weights_downloader.supported_filetypes
            for input_list in error_data["current_inputs"].values():
                for input_value in input_list:
                    if isinstance(input_value, str) and any(
                        input_value.endswith(ft) for ft in weights_filetypes
                    ):
                        weights_to_delete.append(input_value)

            for weight_file in list(set(weights_to_delete)):
                self.weights_downloader.delete_weights(weight_file)

        raise Exception(
            "The weights for this workflow have been corrupted. They have been deleted and will be re-downloaded on the next run. Please try again."
        )

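    # Read websocket messages until the server reports that execution of this
    # prompt_id has finished (an "executing" message whose node is None),
    # raising if an execution_error is received along the way.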
    def wait_for_prompt_completion(self, workflow, prompt_id):
        while True:
            out = self.ws.recv()
            if isinstance(out, str):
                message = json.loads(out)
                if message["type"] == "execution_error":
                    error_data = message["data"]
                    if (
                        "exception_type" in error_data
                        and error_data["exception_type"]
                        == "safetensors_rust.SafetensorError"
                    ):
                        self._delete_corrupted_weights(error_data)

                    error_message = json.dumps(message, indent=2)
                    raise Exception(
                        f"There was an error executing your workflow:\n\n{error_message}"
                    )

                if message["type"] == "executing":
                    data = message["data"]
                    if data["node"] is None and data["prompt_id"] == prompt_id:
                        break
                    elif data["prompt_id"] == prompt_id:
                        node = workflow.get(data["node"], {})
                        meta = node.get("_meta", {})
                        class_type = node.get("class_type", "Unknown")
                        print(
                            f"Executing node {data['node']}, title: {meta.get('title', 'Unknown')}, class type: {class_type}"
                        )
            else:
                continue

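    # Parse the workflow JSON, reject non-API exports, and prepare unsupported
    # node checks, inputs and weights before the workflow is queued.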
    def load_workflow(self, workflow):
        if not isinstance(workflow, dict):
            wf = json.loads(workflow)
        else:
            wf = workflow

        # There are two types of ComfyUI JSON
        # We need the API version
        if any(key in wf.keys() for key in ["last_node_id", "last_link_id", "version"]):
            raise ValueError(
                "You need to use the API JSON version of a ComfyUI workflow. To do this go to your ComfyUI settings and turn on 'Enable Dev mode Options'. Then you can save your ComfyUI workflow via the 'Save (API Format)' button."
            )

        self.handle_known_unsupported_nodes(wf)
        self.handle_inputs(wf)
        self.handle_weights(wf)
        return wf

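    # Queue the small workflow stored in reset.json; running it resets
    # ComfyUI's execution cache between predictions.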
    def reset_execution_cache(self):
        print("Resetting execution cache")
        with open("reset.json", "r") as file:
            reset_workflow = json.loads(file.read())

        self.queue_prompt(reset_workflow)

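    # Replace numeric seed-style inputs with a fresh random value so that
    # repeated runs do not reuse the same seed.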
    def randomise_input_seed(self, input_key, inputs):
        if input_key in inputs and isinstance(inputs[input_key], (int, float)):
            new_seed = random.randint(0, 2**32 - 1)
            print(f"Randomising {input_key} to {new_seed}")
            inputs[input_key] = new_seed

    def randomise_seeds(self, workflow):
        for node_id, node in workflow.items():
            inputs = node.get("inputs", {})
            seed_keys = ["seed", "noise_seed", "rand_seed"]
            for seed_key in seed_keys:
                self.randomise_input_seed(seed_key, inputs)

    def run_workflow(self, workflow):
        print("Running workflow")
        prompt_id = self.queue_prompt(workflow)
        self.wait_for_prompt_completion(workflow, prompt_id)
        output_json = self.get_history(prompt_id)
        print("outputs: ", output_json)
        print("====================================")

    def get_history(self, prompt_id):
        with urllib.request.urlopen(
            f"http://{self.server_address}/history/{prompt_id}"
        ) as response:
            output = json.loads(response.read())
            return output[prompt_id]["outputs"]

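    # Recursively collect files from the given directories, printing a simple
    # tree as it goes and optionally filtering by file extension.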
    def get_files(self, directories, prefix="", file_extensions=None):
        files = []
        if isinstance(directories, str):
            directories = [directories]

        for directory in directories:
            for f in os.listdir(directory):
                if f == "__MACOSX":
                    continue
                path = os.path.join(directory, f)
                if os.path.isfile(path):
                    print(f"{prefix}{f}")
                    files.append(Path(path))
                elif os.path.isdir(path):
                    print(f"{prefix}{f}/")
                    files.extend(self.get_files(path, prefix=f"{prefix}{f}/"))

        if file_extensions:
            files = [f for f in files if f.name.split(".")[-1] in file_extensions]

        return sorted(files)

    def cleanup(self, directories):
        self.clear_queue()
        for directory in directories:
            if os.path.exists(directory):
                shutil.rmtree(directory)
                os.makedirs(directory)
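
# A rough sketch of how this class is typically driven (the real wiring lives
# in the repository's predict.py and may differ; the address and directory
# names below are illustrative only):
#
#   comfy = ComfyUI("127.0.0.1:8188")
#   comfy.start_server("/tmp/outputs", "/tmp/inputs")
#   wf = comfy.load_workflow(workflow_json_string)
#   comfy.connect()
#   comfy.randomise_seeds(wf)
#   comfy.run_workflow(wf)
#   output_files = comfy.get_files("/tmp/outputs")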