Commit 82adae3

Merge pull request #6 from hlky/master
@hlky torch_gc/empty cache after generation
2 parents 34e9795 + 15a700b commit 82adae3

1 file changed: +6 -2 lines changed

webui.py (+6 -2)

@@ -128,6 +128,9 @@ def create_random_tensors(shape, seeds):
     x = torch.stack(xs)
     return x

+def torch_gc():
+    torch.cuda.empty_cache()
+    torch.cuda.ipc_collect()

 def load_GFPGAN():
     model_name = 'GFPGANv1.3'
@@ -304,7 +307,7 @@ def process_images(outpath, func_init, func_sample, prompt, seed, sampler_name,
     """this is the main loop that both txt2img and img2img use; it calls func_init once inside all the scopes and func_sample once per batch"""

     assert prompt is not None
-    torch.cuda.empty_cache()
+    torch_gc()

     if seed == -1:
         seed = random.randrange(4294967294)
@@ -405,6 +408,7 @@ def process_images(outpath, func_init, func_sample, prompt, seed, sampler_name,

     output_images.insert(0, grid)

+
     grid.save(os.path.join(outpath, f'grid-{grid_count:04}.{opt.grid_format}'))
     grid_count += 1

@@ -415,7 +419,7 @@ def process_images(outpath, func_init, func_sample, prompt, seed, sampler_name,

     for comment in comments:
         info += "\n\n" + comment
-
+    torch_gc()
     return output_images, seed, info


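For context, the torch_gc helper introduced by this commit only wraps PyTorch's built-in CUDA cleanup calls. Below is a minimal, self-contained sketch of the same pattern; run_generation is a hypothetical stand-in for a txt2img/img2img sampling pass and is not part of webui.py.

import torch

def torch_gc():
    # Hand cached, unused GPU memory back to the driver and clean up
    # CUDA IPC handles left behind by finished processes.
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
        torch.cuda.ipc_collect()

def run_generation():
    # Hypothetical stand-in for one txt2img/img2img sampling pass.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return torch.randn(1, 3, 512, 512, device=device)

if __name__ == "__main__":
    torch_gc()                 # start the batch with a clean cache
    images = run_generation()
    del images                 # drop Python references first ...
    torch_gc()                 # ... then release the cached VRAM

Note that torch.cuda.empty_cache() only releases memory the caching allocator holds but no live tensor is using, so dropping references to the generated tensors (or letting them go out of scope) before calling torch_gc() is what actually makes the VRAM reclaimable.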