auto-git:
[change] generate_equirect.py
This commit is contained in:
@@ -122,6 +122,28 @@ def clear_torch_cache(device: str | None = None) -> None:
|
|||||||
empty_cache()
|
empty_cache()
|
||||||
|
|
||||||
|
|
||||||
|
def configure_pipeline_memory(pipe) -> None:
    """Enable memory-saving features on a diffusers-style pipeline.

    Turns on attention slicing plus VAE tiling and VAE slicing when the
    pipeline exposes them, trading a little speed for a much smaller peak
    VRAM footprint during generation and decode.

    Args:
        pipe: A diffusion pipeline object (project type — assumed to be a
            HuggingFace diffusers pipeline; TODO confirm against callers).

    Returns:
        None; the pipeline is configured in place.
    """
    # Guard every call with hasattr for consistency: the original guarded
    # only the VAE helpers, so a pipeline class without
    # enable_attention_slicing would raise AttributeError here.
    if hasattr(pipe, "enable_attention_slicing"):
        pipe.enable_attention_slicing()
    if hasattr(pipe, "enable_vae_tiling"):
        pipe.enable_vae_tiling()
    if hasattr(pipe, "enable_vae_slicing"):
        pipe.enable_vae_slicing()
def make_progress_cb(enable_upscale: bool, seam_inpaint: bool):
    """Build a progress callback that emits machine-readable status lines.

    The returned callable prints one ``PROGRESS {json}`` line per call,
    flushed immediately so a parent process can stream updates in real
    time. Each payload carries the phase name, the step counters, and the
    upscale / seam-inpaint flags captured at creation time.
    """

    def progress_cb(phase: str, current: int, total: int):
        # Keyword-argument dict keeps the same key insertion order as the
        # original literal, so the serialized JSON is byte-identical.
        status = dict(
            phase=phase,
            current=current,
            total=total,
            upscale=enable_upscale,
            seamInpaint=seam_inpaint,
        )
        # Flush so the consumer sees each update immediately, not on exit.
        print(f"PROGRESS {json.dumps(status)}", flush=True)

    return progress_cb
def decode_latents_to_image(vae: AutoencoderKL, latents: torch.Tensor, device: str) -> Image.Image:
|
def decode_latents_to_image(vae: AutoencoderKL, latents: torch.Tensor, device: str) -> Image.Image:
|
||||||
print("→ Decoding latent image with standalone VAE…")
|
print("→ Decoding latent image with standalone VAE…")
|
||||||
if hasattr(vae, "enable_tiling"):
|
if hasattr(vae, "enable_tiling"):
|
||||||
|
|||||||
Reference in New Issue
Block a user