auto-git:

[add] README.md
 [add] default.png
 [add] equirect_hdr_icon_512-Wiederhergestellt.png
 [add] generate_equirect.py
 [add] icon.png
 [add] index.html
 [add] package-lock.json
 [add] package.json
 [add] public/
 [add] requirements.txt
 [add] run.sh
 [add] src-tauri/
 [add] src/
 [add] vite.config.js
This commit is contained in:
2026-05-07 10:44:51 +02:00
parent 19e5cbfa1f
commit 2a2e72eda7
21 changed files with 7508 additions and 0 deletions

1
README.md Normal file
View File

@@ -0,0 +1 @@
# Projekt in skymap-gen

BIN
default.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 146 KiB

609
generate_equirect.py Normal file
View File

@@ -0,0 +1,609 @@
#!/usr/bin/env python3
"""
Generate an equirectangular HDRI image using Diffusers.
Optional upscaling:
--upscale topaz → legacy Topaz Photo AI CLI (if installed)
--upscale realesrgan → open-source Real-ESRGAN in-Python upscaler (pip install realesrgan==0.3.0 basicsr opencv-python)
Default flow: prompt in → equirectangular PNG out. Add --seam-inpaint to patch the horizontal wrap seam.
"""
import argparse
import gc
import json
import math
import os
import re
import shutil
import tempfile
import torch
from PIL import Image, ImageDraw
import numpy as np
from diffusers import (
    StableDiffusionPipeline,
    StableDiffusionXLPipeline,
    DPMSolverMultistepScheduler,
    EulerDiscreteScheduler,
    EulerAncestralDiscreteScheduler,
    HeunDiscreteScheduler,
    DDIMScheduler,
    StableDiffusionInpaintPipeline,
    AutoencoderKL,
    UNet2DConditionModel,
)
from huggingface_hub import hf_hub_download

# Default panorama model. You can override via --model-path.
# Uses the SDXL 360 diffusion checkpoint from ProGamerGov (single-file safetensors).
MODEL_PATH = "ProGamerGov/sdxl-360-diffusion"
# SDXL base pipeline used when the panorama repo ships only a UNet.
BASE_SDXL_MODEL = "stabilityai/stable-diffusion-xl-base-1.0"
# Inpainting pipeline used by --seam-inpaint to patch the horizontal wrap seam.
INPAINT_MODEL = "Lykon/dreamshaper-8-inpainting"
# macOS install location of the legacy Topaz Photo AI CLI (--upscale topaz).
TOPAZ_CLI = "/Applications/Topaz Photo AI.app/Contents/MacOS/Topaz Photo AI"
# Real-ESRGAN weights file, expected next to this script (--upscale realesrgan).
REALESRGAN_MODEL = "RealESRGAN_x4plus.pth"
REALESRGAN_SCALE = 4  # x4 model for full upscale
# Recommended VAE for SDXL checkpoints
SDXL_VAE = "stabilityai/sdxl-vae"
def sanitize_name(prompt: str) -> str:
    """Turn a free-form prompt into a safe, lowercase filename stem.

    Runs of whitespace collapse to single underscores and every character
    outside [a-z0-9_] is dropped; an empty result falls back to "env".
    """
    slug = re.sub(r"[^a-z0-9_]+", "", re.sub(r"\s+", "_", prompt.strip().lower()))
    return slug if slug else "env"
def next_filename(output_dir: str, base: str, width: int, height: int) -> str:
    """Return the first non-existing '<base>-<n>-<width>x<height>.png' path.

    Creates `output_dir` if needed and probes n = 1, 2, ... until a free
    filename is found.
    """
    os.makedirs(output_dir, exist_ok=True)
    index = 1
    candidate = os.path.join(output_dir, f"{base}-{index}-{width}x{height}.png")
    while os.path.exists(candidate):
        index += 1
        candidate = os.path.join(output_dir, f"{base}-{index}-{width}x{height}.png")
    return candidate
def shift_image(img: Image.Image, shift: int) -> Image.Image:
    """Roll the image left by `shift` pixels with horizontal wrap-around.

    Column `shift` of the input becomes column 0 of the output; the first
    `shift` columns reappear on the right edge.
    """
    width, height = img.size
    rolled = Image.new("RGB", (width, height))
    right_part = img.crop((shift, 0, width, height))
    left_part = img.crop((0, 0, shift, height))
    rolled.paste(right_part, (0, 0))
    rolled.paste(left_part, (width - shift, 0))
    return rolled
def create_mask(width: int, height: int, mask_w: int) -> Image.Image:
    """Build an L-mode inpainting mask: black with a centered white band.

    The vertical band is `mask_w` pixels wide, full height, horizontally
    centered — it covers the wrap seam after the image has been shifted.
    """
    band_left = (width - mask_w) // 2
    mask = Image.new("L", (width, height), 0)
    ImageDraw.Draw(mask).rectangle([band_left, 0, band_left + mask_w, height], fill=255)
    return mask
def unshift_image(img: Image.Image, shift: int) -> Image.Image:
    """Inverse of shift_image: roll the image right by `shift` pixels."""
    width, height = img.size
    restored = Image.new("RGB", (width, height))
    tail = img.crop((width - shift, 0, width, height))
    head = img.crop((0, 0, width - shift, height))
    restored.paste(tail, (0, 0))
    restored.paste(head, (shift, 0))
    return restored
def select_device() -> str:
    """Pick the best available torch device: Apple MPS, then CUDA, else CPU."""
    if torch.backends.mps.is_available():
        return "mps"
    return "cuda" if torch.cuda.is_available() else "cpu"
def clear_torch_cache(device: str | None = None) -> None:
gc.collect()
if device == "cuda" and torch.cuda.is_available():
torch.cuda.empty_cache()
elif device == "mps" and torch.backends.mps.is_available() and hasattr(torch, "mps"):
try:
torch.mps.synchronize()
except Exception:
pass
empty_cache = getattr(torch.mps, "empty_cache", None)
if empty_cache is not None:
empty_cache()
def decode_latents_to_image(vae: AutoencoderKL, latents: torch.Tensor, device: str) -> Image.Image:
    """Decode a latent tensor into an 8-bit RGB PIL image with a standalone VAE.

    De-normalizes the latents per the VAE config (latents_mean/std when
    published, else plain scaling_factor division), decodes under
    inference_mode, and returns the first batch item. Clears the torch cache
    before returning.
    """
    print("→ Decoding latent image with standalone VAE…")
    # Tiling/slicing (when supported) bound peak memory for large panoramas.
    if hasattr(vae, "enable_tiling"):
        vae.enable_tiling()
    if hasattr(vae, "enable_slicing"):
        vae.enable_slicing()
    vae.to(device)
    vae.eval()
    # Match the latents' dtype to the VAE weights before decoding.
    vae_dtype = next(vae.parameters()).dtype
    with torch.inference_mode():
        latents = latents.to(device=device, dtype=vae_dtype)
        # Some VAEs publish latents_mean/latents_std; undo that normalization,
        # otherwise just divide by the scaling factor.
        has_latents_mean = hasattr(vae.config, "latents_mean") and vae.config.latents_mean is not None
        has_latents_std = hasattr(vae.config, "latents_std") and vae.config.latents_std is not None
        if has_latents_mean and has_latents_std:
            latent_channels = len(vae.config.latents_mean)
            latents_mean = (
                torch.tensor(vae.config.latents_mean)
                .view(1, latent_channels, 1, 1)
                .to(latents.device, latents.dtype)
            )
            latents_std = (
                torch.tensor(vae.config.latents_std)
                .view(1, latent_channels, 1, 1)
                .to(latents.device, latents.dtype)
            )
            latents = latents * latents_std / vae.config.scaling_factor + latents_mean
        else:
            latents = latents / vae.config.scaling_factor
        decoded = vae.decode(latents, return_dict=False)[0]
        # Map decoder output from [-1, 1] into [0, 1].
        decoded = (decoded / 2 + 0.5).clamp(0, 1)
        image = decoded[0].detach().cpu().permute(1, 2, 0).float().numpy()
    # Drop the large tensors before converting, then release cached memory.
    del decoded, latents
    clear_torch_cache(device)
    return Image.fromarray((image * 255).round().astype("uint8"))
def run_topaz(input_path: str, tempdir: str) -> str:
    """Upscale `input_path` with the Topaz Photo AI CLI, writing into `tempdir`.

    Returns:
        The newest PNG produced in `tempdir`.

    Raises:
        RuntimeError: if the CLI cannot be invoked, exits non-zero, or
            produces no PNG output.
    """
    import subprocess  # local import: only needed for this optional path

    print("→ Upscaling with Topaz Photo AI CLI…")
    try:
        # Pass an argument list (no shell) so paths containing spaces or
        # quotes cannot break the command line — the previous os.system
        # f-string was shell-injectable.
        result = subprocess.run(
            [TOPAZ_CLI, "--cli", input_path, "-o", tempdir],
            check=False,
        )
    except Exception as e:  # noqa: BLE001
        raise RuntimeError(f"Topaz invocation failed: {e}") from e
    if result.returncode != 0:
        raise RuntimeError("Topaz CLI returned non-zero exit code")
    # Topaz names its output itself; pick the most recently written PNG.
    upscaled_files = sorted(
        (os.path.join(tempdir, f) for f in os.listdir(tempdir) if f.lower().endswith('.png')),
        key=os.path.getmtime,
        reverse=True,
    )
    if not upscaled_files:
        raise RuntimeError("Topaz produced no PNG output")
    print(f"→ Upscaled file: {upscaled_files[0]}")
    return upscaled_files[0]
def run_realesrgan(
    input_image: Image.Image,
    tempdir: str,
    scale: int = 4,
    model_path: str = REALESRGAN_MODEL,
    progress_cb=None
) -> str:
    """Upscale `input_image` with Real-ESRGAN and save the result into `tempdir`.

    Args:
        input_image: source PIL image (converted to BGR for cv2/Real-ESRGAN).
        tempdir: directory receiving the upscaled PNG.
        scale: Real-ESRGAN scale factor (x4 weights by default).
        model_path: path to the .pth weights file.
        progress_cb: optional callable(phase, current, total) receiving
            per-tile progress updates.

    Returns:
        Path of the written PNG.

    Raises:
        RuntimeError: when the optional dependencies or the weights file are
            missing.
    """
    try:
        # Compatibility shim for newer torchvision where functional_tensor moved/renamed
        import sys
        try:
            import torchvision.transforms._functional_tensor as _ft  # type: ignore
            sys.modules.setdefault("torchvision.transforms.functional_tensor", _ft)
        except Exception:
            pass
        from realesrgan import RealESRGANer
        from basicsr.archs.rrdbnet_arch import RRDBNet
        import cv2
        import types
    except Exception as e:  # noqa: BLE001
        raise RuntimeError(
            "Real-ESRGAN dependencies missing. Install with: pip install realesrgan==0.3.0 basicsr opencv-python torchvision"
        ) from e
    device = select_device()
    # (An unused `is_sdxl` leftover was removed here: `model_path` in this
    # function is the ESRGAN weights file, not a diffusion model id.)
    if not model_path or not os.path.exists(model_path):
        raise RuntimeError(
            f"Real-ESRGAN model not found at {model_path!r}. "
            "Place RealESRGAN_x4plus.pth next to this script or update REALESRGAN_MODEL."
        )
    # Real-ESRGAN / cv2 operate on BGR arrays.
    img_bgr = cv2.cvtColor(np.array(input_image), cv2.COLOR_RGB2BGR)
    print(f"→ Upscaling with Real-ESRGAN (x{scale}) on {device}")
    model = RRDBNet(
        num_in_ch=3,
        num_out_ch=3,
        num_feat=64,
        num_block=23,
        num_grow_ch=32,
        scale=scale,
    )
    upsampler = RealESRGANer(
        model_path=model_path,
        scale=scale,
        model=model,
        tile=64,  # aggressive tiling to keep memory & runtime manageable
        tile_pad=10,
        pre_pad=0,
        half=False,  # keep full precision for CPU/MPS
    )

    # Wrap tile processing to surface progress per tile.
    def tile_process_with_progress(self):
        batch, channel, height, width = self.img.shape
        output_height = height * self.scale
        output_width = width * self.scale
        output_shape = (batch, channel, output_height, output_width)
        self.output = self.img.new_zeros(output_shape)
        tiles_x = math.ceil(width / self.tile_size)
        tiles_y = math.ceil(height / self.tile_size)
        total_tiles = max(1, tiles_x * tiles_y)
        for y in range(tiles_y):
            for x in range(tiles_x):
                ofs_x = x * self.tile_size
                ofs_y = y * self.tile_size
                input_start_x = ofs_x
                input_end_x = min(ofs_x + self.tile_size, width)
                input_start_y = ofs_y
                input_end_y = min(ofs_y + self.tile_size, height)
                # Pad each tile so the network sees context past the tile edge.
                input_start_x_pad = max(input_start_x - self.tile_pad, 0)
                input_end_x_pad = min(input_end_x + self.tile_pad, width)
                input_start_y_pad = max(input_start_y - self.tile_pad, 0)
                input_end_y_pad = min(input_end_y + self.tile_pad, height)
                input_tile_width = input_end_x - input_start_x
                input_tile_height = input_end_y - input_start_y
                tile_idx = y * tiles_x + x + 1
                input_tile = self.img[:, :, input_start_y_pad:input_end_y_pad, input_start_x_pad:input_end_x_pad]
                try:
                    with torch.no_grad():
                        output_tile = self.model(input_tile)
                except RuntimeError as error:
                    # NOTE(review): mirrors upstream Real-ESRGAN — a failed
                    # tile is only logged, and the use of output_tile below
                    # would then raise NameError.
                    print('Error', error)
                if progress_cb:
                    progress_cb("upscale", tile_idx, total_tiles)
                output_start_x = input_start_x * self.scale
                output_end_x = input_end_x * self.scale
                output_start_y = input_start_y * self.scale
                output_end_y = input_end_y * self.scale
                # Crop the padding back out of the network output.
                output_start_x_tile = (input_start_x - input_start_x_pad) * self.scale
                output_end_x_tile = output_start_x_tile + input_tile_width * self.scale
                output_start_y_tile = (input_start_y - input_start_y_pad) * self.scale
                output_end_y_tile = output_start_y_tile + input_tile_height * self.scale
                self.output[:, :, output_start_y:output_end_y,
                            output_start_x:output_end_x] = output_tile[:, :, output_start_y_tile:output_end_y_tile,
                                                                       output_start_x_tile:output_end_x_tile]

    if progress_cb:
        # Monkey-patch the bound method so tiles report progress, and emit an
        # initial 0/1 tick.
        upsampler.tile_process = types.MethodType(tile_process_with_progress, upsampler)
        progress_cb("upscale", 0, 1)
    sr_img, _ = upsampler.enhance(img_bgr, outscale=scale)
    sr_img = Image.fromarray(cv2.cvtColor(sr_img, cv2.COLOR_BGR2RGB))
    if progress_cb:
        progress_cb("upscale", 1, 1)
    out_path = os.path.join(tempdir, f"realesrgan_x{scale}.png")
    sr_img.save(out_path)
    print(f"→ Real-ESRGAN output: {out_path}")
    return out_path
def generate(
    prompt: str,
    output_path: str,
    work_dir: str,
    upscale: str = "none",
    model_path: str = MODEL_PATH,
    base_model: str = BASE_SDXL_MODEL,
    vae_model: str = SDXL_VAE,
    steps: int = 25,
    guidance: float = 4.5,
    scheduler: str | None = None,
    width: int = 1024,
    height: int = 512,
    seam_inpaint: bool = False,
) -> str:
    """Generate an equirectangular panorama and write it to `output_path`.

    Pipeline: load the diffusion model (with several fallbacks), optionally
    override the scheduler, run text-to-image, optionally inpaint the wrap
    seam, optionally upscale, then move the result to `output_path`.
    Progress is reported as `PROGRESS {json}` lines on stdout.

    Returns:
        `output_path`.

    Raises:
        RuntimeError: when no pipeline could be loaded, or an SDXL pipeline
            ends up without a VAE.
        ValueError: for an unsupported scheduler name.
    """
    # Normalize common aliases that 404 on HF
    aliases = {
        "proximasan/sdxl-360-diffusion": "ProGamerGov/sdxl-360-diffusion",
        "proximasan": "ProGamerGov/sdxl-360-diffusion",
    }
    model_path = aliases.get(model_path, model_path)
    device = select_device()
    is_sdxl = "sdxl" in model_path.lower()
    scale = guidance  # keep inpaint guidance in sync with cfg guidance
    enable_upscale = bool(upscale and upscale != "none")
    os.makedirs(work_dir, exist_ok=True)
    with tempfile.TemporaryDirectory(dir=work_dir) as tempdir:
        print(f"→ Using tempdir: {tempdir}")
        gen_pipe = None
        load_errors: list[str] = []
        vae = None
        # For SDXL, pre-load the recommended standalone VAE (best effort).
        if is_sdxl:
            try:
                vae = AutoencoderKL.from_pretrained(vae_model, subfolder="vae", torch_dtype=torch.float32)
            except Exception as e:  # noqa: BLE001
                load_errors.append(f"vae: {e}")
        # Try native Diffusers repo
        try:
            if is_sdxl:
                pipe_kwargs = {"torch_dtype": torch.float32}
                if vae is not None:
                    pipe_kwargs["vae"] = vae
                # Some SDXL repos only ship a UNet (e.g., sdxl-360); in that case load SDXL base
                # and swap the UNet to keep the rest of the components consistent.
                unet_only = False
                if model_path and model_path.endswith("sdxl-360-diffusion"):
                    try:
                        # Probe for a UNet-only layout by fetching its config.
                        hf_hub_download(model_path, "unet/config.json")
                        unet_only = True
                    except Exception:
                        unet_only = False
                if unet_only:
                    base_pipe = StableDiffusionXLPipeline.from_pretrained(
                        base_model,
                        **pipe_kwargs
                    ).to(device)
                    unet = UNet2DConditionModel.from_pretrained(
                        model_path,
                        subfolder="unet",
                        torch_dtype=torch.float32
                    ).to(device)
                    base_pipe.unet = unet
                    gen_pipe = base_pipe
                else:
                    gen_pipe = StableDiffusionXLPipeline.from_pretrained(
                        model_path,
                        **pipe_kwargs
                    ).to(device)
            else:
                gen_pipe = StableDiffusionPipeline.from_pretrained(
                    model_path,
                    torch_dtype=torch.float32
                ).to(device)
        except Exception as e:  # noqa: BLE001
            load_errors.append(f"from_pretrained: {e}")
        # Fallback: single-file SDXL checkpoint from the repo
        if gen_pipe is None:
            ckpt_candidates = [
                "sdxl_360_diffusion.safetensors",
                "sdxl_360_diffusion_unet.safetensors",
                "model.safetensors",
            ]
            last_err = None
            for fname in ckpt_candidates:
                try:
                    ckpt = hf_hub_download(model_path, fname)
                    pipe_kwargs = {"torch_dtype": torch.float32}
                    if vae is not None:
                        pipe_kwargs["vae"] = vae
                    gen_pipe = StableDiffusionXLPipeline.from_single_file(
                        ckpt,
                        **pipe_kwargs
                    ).to(device)
                    break
                except Exception as e2:  # noqa: BLE001
                    last_err = e2
                    load_errors.append(f"{fname}: {e2}")
            if gen_pipe is None:
                raise RuntimeError(
                    f"Failed to load model '{model_path}'. "
                    "Ensure the repo/path exists (e.g., ProGamerGov/sdxl-360-diffusion) and "
                    "install accelerate for low_cpu_mem_usage: pip install accelerate. "
                    f"Errors: {load_errors}"
                ) from (last_err or Exception("No pipeline loaded"))
        # Last-resort: attach the default SDXL VAE if the pipeline has none.
        if gen_pipe.vae is None and is_sdxl:
            try:
                vae = vae or AutoencoderKL.from_pretrained(
                    SDXL_VAE,
                    subfolder="vae",
                    torch_dtype=torch.float32
                )
                gen_pipe.vae = vae.to(device)
                gen_pipe.to(device)
            except Exception as e:  # noqa: BLE001
                load_errors.append(f"vae-fallback: {e}")
                raise RuntimeError(
                    "Loaded SDXL pipeline without a VAE; failed to attach the SDXL VAE. "
                    f"Errors: {load_errors}"
                ) from e
        # Optionally override scheduler; otherwise keep the pipeline default (Euler for SDXL base).
        if scheduler:
            sched_kind = scheduler.lower()
            sched_cfg = gen_pipe.scheduler.config
            if sched_kind in {"dpmsolver", "dpmsolver++"}:
                gen_pipe.scheduler = DPMSolverMultistepScheduler.from_config(sched_cfg)
            elif sched_kind in {"dpmsolver-sde", "dpmsolver_sde"}:
                gen_pipe.scheduler = DPMSolverMultistepScheduler.from_config(
                    sched_cfg,
                    algorithm_type="sde-dpmsolver++"
                )
            elif sched_kind in {"euler"}:
                gen_pipe.scheduler = EulerDiscreteScheduler.from_config(sched_cfg)
            elif sched_kind in {"euler_a", "euler-ancestral", "euler-ancestral-discrete"}:
                gen_pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(sched_cfg)
            elif sched_kind in {"heun"}:
                gen_pipe.scheduler = HeunDiscreteScheduler.from_config(sched_cfg)
            elif sched_kind in {"ddim"}:
                gen_pipe.scheduler = DDIMScheduler.from_config(sched_cfg)
            else:
                raise ValueError(
                    f"Unsupported scheduler '{scheduler}'. "
                    "Try one of: euler, euler_a, heun, ddim, dpmsolver, dpmsolver-sde."
                )
        # Memory savers for large panorama resolutions.
        gen_pipe.enable_attention_slicing()
        if is_sdxl and vae is not None and hasattr(gen_pipe, "enable_vae_tiling"):
            gen_pipe.enable_vae_tiling()

        def progress_cb(phase: str, current: int, total: int):
            # Machine-readable progress line, consumed by the Tauri backend.
            payload = {
                "phase": phase,
                "current": current,
                "total": total,
                "upscale": enable_upscale,
                "seamInpaint": seam_inpaint,
            }
            print(f"PROGRESS {json.dumps(payload)}", flush=True)

        print("→ Generating equirectangular HDRI…")
        progress_cb("gen", 0, steps)
        image = gen_pipe(
            prompt=prompt,
            num_inference_steps=steps,
            guidance_scale=guidance,
            width=width,
            height=height,
            callback_steps=1,
            callback=lambda step, timestep, kwargs: progress_cb("gen", step + 1, steps),
        ).images[0]
        gen_path = os.path.join(tempdir, f"base_{width}x{height}.png")
        image.save(gen_path)
        print(f"→ Saved initial image to {gen_path}")
        seamless_path = os.path.join(tempdir, os.path.basename(output_path))
        if seam_inpaint:
            # Shift the seam into the center, inpaint a band over it, then
            # shift back so the horizontal wrap becomes seamless.
            shift_amt = width // 2
            mask_w = width // 8
            shifted = shift_image(image, shift_amt)
            mask = create_mask(width, height, mask_w)
            inpaint_pipe = StableDiffusionInpaintPipeline.from_pretrained(
                INPAINT_MODEL,
                torch_dtype=torch.float32
            ).to(device)
            inpaint_pipe.enable_attention_slicing()
            print("→ Inpainting seam for seamless tiling…")
            progress_cb("inpaint", 0, steps)
            inpainted = inpaint_pipe(
                prompt=prompt,
                image=shifted,
                mask_image=mask,
                num_inference_steps=steps,
                guidance_scale=scale,
                width=width,
                height=height,
                callback_steps=1,
                callback=lambda step, timestep, kwargs: progress_cb("inpaint", step + 1, steps),
            ).images[0]
            inpainted = unshift_image(inpainted, shift_amt)
            inpainted.save(seamless_path)
            print(f"→ Crafted seamless image: {seamless_path}")
            final_source = inpainted
        else:
            image.save(seamless_path)
            print(f"→ Using raw output (seam inpaint disabled): {seamless_path}")
            final_source = image
        final_path = seamless_path
        # Optional upscaling; failures fall back to the un-upscaled image.
        if upscale and upscale != "none":
            try:
                if upscale is True or upscale == "topaz":
                    final_path = run_topaz(seamless_path, tempdir)
                elif upscale == "realesrgan":
                    final_path = run_realesrgan(
                        final_source,
                        tempdir,
                        scale=REALESRGAN_SCALE,
                        model_path=REALESRGAN_MODEL,
                        progress_cb=progress_cb
                    )
                else:
                    raise ValueError(f"Unknown upscale option '{upscale}'")
            except Exception as e:  # noqa: BLE001
                print(f"Upscaling failed ({upscale}); keeping seamless image: {e}")
        # Move out of the tempdir before it is cleaned up.
        shutil.move(final_path, output_path)
    try:
        with Image.open(output_path) as _im:
            print(f"→ Final image written to {output_path} [{_im.size[0]}x{_im.size[1]}]")
    except Exception:
        print(f"→ Final image written to {output_path}")
    return output_path
def main():
    """Command-line entry point: parse args, derive an output path, generate."""
    cli = argparse.ArgumentParser(
        description="Generate an equirectangular HDRI image"
    )
    cli.add_argument('--prompt', required=True, help='Text prompt for generation')
    cli.add_argument('--output', help='Output filename (PNG)')
    cli.add_argument('--output-dir', default='output', help='Directory for outputs (default: output)')
    cli.add_argument('--work-dir', default=os.path.dirname(os.path.abspath(__file__)), help='Working directory for temp files')
    cli.add_argument(
        '--upscale',
        choices=['none', 'topaz', 'realesrgan'],
        default='realesrgan',
        help='Optional upscaler: none, topaz (legacy), realesrgan (open source; default)'
    )
    cli.add_argument(
        '--model-path',
        default=MODEL_PATH,
        help='Diffusers model id or local path (default: ProGamerGov/sdxl-360-diffusion)'
    )
    cli.add_argument(
        '--base-model',
        default=BASE_SDXL_MODEL,
        help='SDXL base pipeline used when the model only provides a UNet (default: stabilityai/stable-diffusion-xl-base-1.0)'
    )
    cli.add_argument(
        '--vae-model',
        default=SDXL_VAE,
        help='VAE repo/path to load for SDXL models (default: stabilityai/sdxl-vae; try madebyollin/sdxl-vae-fp16-fix on Mac)'
    )
    cli.add_argument('--steps', type=int, default=25, help='Number of inference steps (default: 25)')
    cli.add_argument('--guidance', type=float, default=4.5, help='CFG guidance scale (default: 4.5)')
    cli.add_argument('--width', type=int, default=1024, help='Output width (default: 1024)')
    cli.add_argument('--height', type=int, default=512, help='Output height (default: 512)')
    cli.add_argument(
        '--seam-inpaint',
        action='store_true',
        help='Patch the horizontal wrap seam by shifting, inpainting the center seam, then shifting back'
    )
    cli.add_argument(
        '--scheduler',
        choices=['euler', 'euler_a', 'heun', 'ddim', 'dpmsolver', 'dpmsolver-sde'],
        help='Sampler/scheduler override; default uses the pipeline scheduler (Euler for SDXL base)'
    )
    opts = cli.parse_args()
    # Derive a collision-free output path from the prompt unless one was given.
    stem = sanitize_name(opts.prompt)
    chosen = opts.output or next_filename(opts.output_dir, stem, opts.width, opts.height)
    try:
        # Print the produced path as the last stdout line (consumed by the UI).
        print(generate(
            opts.prompt,
            os.path.abspath(chosen),
            opts.work_dir,
            upscale=opts.upscale,
            model_path=opts.model_path,
            base_model=opts.base_model,
            vae_model=opts.vae_model,
            steps=opts.steps,
            guidance=opts.guidance,
            scheduler=opts.scheduler,
            width=opts.width,
            height=opts.height,
            seam_inpaint=opts.seam_inpaint,
        ))
    except Exception as exc:  # noqa: BLE001
        print(f"Generation failed: {exc}")
        raise


if __name__ == '__main__':
    main()

BIN
icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 146 KiB

13
index.html Normal file
View File

@@ -0,0 +1,13 @@
<!DOCTYPE html>
<!-- Vite entry page: the entire UI is rendered into #app by /src/main.js -->
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Skymap Generator</title>
    <link rel="stylesheet" href="/src/style.css" />
  </head>
  <body>
    <div id="app"></div>
    <script type="module" src="/src/main.js"></script>
  </body>
</html>

1183
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

20
package.json Normal file
View File

@@ -0,0 +1,20 @@
{
"name": "skymap-gen",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview",
"tauri": "tauri"
},
"dependencies": {
"@tauri-apps/api": "^1.5.0",
"three": "^0.165.0"
},
"devDependencies": {
"@tauri-apps/cli": "^1.5.9",
"vite": "^5.1.0"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 52 KiB

BIN
public/default.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 MiB

16
requirements.txt Normal file
View File

@@ -0,0 +1,16 @@
torch==2.9.1
torchvision==0.24.1
diffusers==0.27.2
transformers==4.41.2
huggingface_hub==0.23.4
realesrgan==0.3.0
basicsr==1.4.2
opencv-python==4.12.0.88
Pillow==12.0.0
numpy==2.2.6
gfpgan==1.3.8
facexlib==0.3.0
scipy==1.15.3
scikit-image==0.25.2
tqdm==4.67.1
pyyaml==6.0.3

35
run.sh Executable file
View File

@@ -0,0 +1,35 @@
#!/usr/bin/env bash
# Bootstrap and launch the Skymap Generator dev environment:
# create/reuse a Python venv, install Python and npm deps, then start Tauri dev.
set -euo pipefail

# Resolve the directory containing this script and work from there.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$ROOT_DIR"

# Choose a Python; prefer 3.10 to match the tested venv.
PYTHON_BIN="${PYTHON_BIN:-}"
if [[ -z "${PYTHON_BIN}" ]]; then
    if command -v python3.10 >/dev/null 2>&1; then
        PYTHON_BIN="python3.10"
    else
        PYTHON_BIN="python3"
    fi
fi

# Create the virtualenv on first run, then activate it.
VENV_DIR="$ROOT_DIR/.venv"
if [[ ! -d "$VENV_DIR" ]]; then
    echo "Creating venv at $VENV_DIR using $PYTHON_BIN"
    "$PYTHON_BIN" -m venv "$VENV_DIR"
fi
source "$VENV_DIR/bin/activate"

echo "Installing Python requirements..."
pip install --upgrade pip >/dev/null
pip install -r "$ROOT_DIR/requirements.txt"

# Install JS dependencies only when node_modules is missing.
if [[ ! -d "$ROOT_DIR/node_modules" ]]; then
    echo "Installing npm dependencies..."
    npm install
fi

echo "Starting Tauri dev (npm run tauri dev)..."
npm run tauri dev

3989
src-tauri/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

13
src-tauri/Cargo.toml Normal file
View File

@@ -0,0 +1,13 @@
# Tauri backend crate for the Skymap Generator desktop app.
[package]
name = "skymap-gen"
version = "0.1.0"
authors = ["Skymap"]
edition = "2021"

[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# fs-read-file / fs-read-dir must stay in sync with the fs allowlist in tauri.conf.json.
tauri = { version = "1.5", features = [ "fs-read-file", "fs-read-dir"] }

[build-dependencies]
tauri-build = { version = "1.5", features = [] }

3
src-tauri/build.rs Normal file
View File

@@ -0,0 +1,3 @@
fn main() {
tauri_build::build()
}

BIN
src-tauri/icons/icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 149 KiB

386
src-tauri/src/main.rs Normal file
View File

@@ -0,0 +1,386 @@
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::fs;
use std::io::{BufRead, BufReader, Read};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex};
use tauri::{async_runtime, Env, State, Window};
/// Shared Tauri state: resolved project root plus bookkeeping for the single
/// in-flight generator subprocess.
#[derive(Default, Clone)]
struct Paths {
    project_root: PathBuf,
    // PID of the running generator; Some(0) marks "spawning, PID not yet known".
    active_generation_pid: Arc<Mutex<Option<u32>>>,
    // Set when the frontend asks to cancel; polled by the worker thread.
    cancel_generation_requested: Arc<AtomicBool>,
}

/// One generated map on disk, as returned to the frontend by `list_maps`.
#[derive(Serialize)]
struct MapInfo {
    path: String,
    filename: String,
    // Modification time as Unix seconds (0 when unavailable).
    modified: i64,
}

/// Payload of a successful `generate_map` call.
#[derive(Serialize)]
struct GenerateResult {
    output_path: String,
}

/// Optional generation settings sent by the frontend as camelCase JSON.
/// Every field is optional; `generate_map` supplies defaults for missing ones.
#[derive(Debug, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
struct GenSettings {
    steps: Option<u32>,
    width: Option<u32>,
    height: Option<u32>,
    guidance: Option<f32>,
    scheduler: Option<String>,
    upscale: Option<String>,
    seam_inpaint: Option<bool>,
    model_path: Option<String>,
    base_model: Option<String>,
    vae_model: Option<String>,
}
/// Walk up from the current working directory until a directory containing
/// `generate_equirect.py` is found; that directory is the project root.
fn discover_root() -> Result<PathBuf, String> {
    let cwd = std::env::current_dir().map_err(|e| format!("Failed to resolve current dir: {e}"))?;
    for ancestor in cwd.ancestors() {
        if ancestor.join("generate_equirect.py").exists() {
            return Ok(ancestor.to_path_buf());
        }
    }
    Err("Could not find project root (generate_equirect.py not found in ancestors)".into())
}

/// Return `<root>/output`, creating it if necessary.
fn output_dir(root: &Path) -> Result<PathBuf, String> {
    let mut dir = root.to_path_buf();
    dir.push("output");
    fs::create_dir_all(&dir).map_err(|e| format!("Failed to create output dir: {e}"))?;
    Ok(dir)
}

/// Locate the generator script inside the project root.
fn script_path(root: &Path) -> Result<PathBuf, String> {
    let candidate = root.join("generate_equirect.py");
    if candidate.exists() {
        return Ok(candidate);
    }
    Err("generate_equirect.py not found".to_string())
}

/// Pick the Python binary to run: prefer project-local venv to ensure deps are present.
// NOTE(review): `.venv/bin/python` is the Unix venv layout; on Windows the
// venv interpreter lives at `.venv\Scripts\python.exe`, so this falls back to
// plain `python3` there — confirm whether Windows is a supported target.
fn python_binary(root: &Path) -> PathBuf {
    let venv_py = root.join(".venv").join("bin").join("python");
    if venv_py.exists() {
        return venv_py;
    }
    PathBuf::from("python3")
}
/// Tauri command: list all PNG maps in the output directory, newest first.
#[tauri::command]
fn list_maps(state: State<Paths>) -> Result<Vec<MapInfo>, String> {
    let dir = output_dir(&state.project_root)?;
    let mut maps = Vec::new();
    if !dir.exists() {
        return Ok(maps);
    }
    for entry in fs::read_dir(&dir).map_err(|e| format!("Failed to read output dir: {e}"))? {
        let entry = entry.map_err(|e| format!("Failed to read entry: {e}"))?;
        let path = entry.path();
        // Keep only files with a .png extension (case-insensitive).
        if path
            .extension()
            .and_then(|s| s.to_str())
            .map(|s| s.eq_ignore_ascii_case("png"))
            != Some(true)
        {
            continue;
        }
        let metadata = entry
            .metadata()
            .map_err(|e| format!("Metadata error: {e}"))?;
        // Modification time as Unix seconds; 0 when the platform can't provide it.
        let modified = metadata
            .modified()
            .map(|m| {
                m.duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_secs() as i64)
                    .unwrap_or(0)
            })
            .unwrap_or(0);
        let filename = path
            .file_name()
            .and_then(|s| s.to_str())
            .unwrap_or_default()
            .to_string();
        maps.push(MapInfo {
            path: path.to_string_lossy().to_string(),
            filename,
            modified,
        });
    }
    // Newest first.
    maps.sort_by(|a, b| b.modified.cmp(&a.modified));
    Ok(maps)
}
/// Tauri command: delete a PNG from the output directory.
///
/// Canonicalizes both paths and refuses anything outside the output directory
/// or without a .png extension, so the frontend cannot delete arbitrary files.
#[tauri::command]
fn delete_map(path: String, state: State<Paths>) -> Result<(), String> {
    let dir = output_dir(&state.project_root)?
        .canonicalize()
        .map_err(|e| format!("Failed to resolve output dir: {e}"))?;
    let candidate = PathBuf::from(path);
    // Canonicalization also rejects paths that do not exist.
    let resolved = candidate
        .canonicalize()
        .map_err(|e| format!("Failed to resolve map path: {e}"))?;
    if !resolved.starts_with(&dir) {
        return Err("Refusing to delete a file outside the output directory".to_string());
    }
    if resolved
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.eq_ignore_ascii_case("png"))
        != Some(true)
    {
        return Err("Only PNG maps can be deleted".to_string());
    }
    fs::remove_file(&resolved).map_err(|e| format!("Failed to delete map: {e}"))
}
/// Ask the OS to stop the generator process: SIGTERM on Unix, `taskkill /T /F`
/// (force-kill the whole process tree) on Windows.
fn terminate_process(pid: u32) -> Result<(), String> {
    #[cfg(target_family = "unix")]
    {
        Command::new("kill")
            .arg("-TERM")
            .arg(pid.to_string())
            .status()
            .map_err(|e| format!("Failed to request cancellation: {e}"))?;
        Ok(())
    }
    #[cfg(target_family = "windows")]
    {
        Command::new("taskkill")
            .args(["/PID", &pid.to_string(), "/T", "/F"])
            .status()
            .map_err(|e| format!("Failed to request cancellation: {e}"))?;
        Ok(())
    }
}
/// Tauri command: request cancellation of the running generation.
///
/// The cancel flag is set before the PID is read so that a generation that is
/// still spawning (PID recorded as 0) kills its child right after spawn.
#[tauri::command]
fn cancel_generation(state: State<Paths>) -> Result<(), String> {
    state
        .cancel_generation_requested
        .store(true, Ordering::SeqCst);
    let pid = *state
        .active_generation_pid
        .lock()
        .map_err(|_| "Failed to read active generation state".to_string())?;
    match pid {
        // Child PID not known yet; the worker checks the flag after spawning.
        Some(0) => Ok(()),
        Some(pid) => terminate_process(pid),
        None => Err("No generation is running".to_string()),
    }
}
/// Tauri command: run the Python generator as a subprocess on a blocking
/// thread, forward its `PROGRESS {json}` stdout lines to the frontend as
/// `gen-progress` events, and return the path of the produced map.
///
/// Only one generation may run at a time; the active PID is tracked in state
/// so `cancel_generation` can terminate it.
#[tauri::command]
async fn generate_map(
    window: Window,
    prompt: String,
    settings: Option<GenSettings>,
    state: State<'_, Paths>,
) -> Result<GenerateResult, String> {
    if prompt.trim().is_empty() {
        return Err("Prompt must not be empty".into());
    }
    let prompt_clone = prompt.clone();
    let root = state.project_root.clone();
    // Fill in defaults for any settings the frontend omitted.
    let cfg = settings.unwrap_or_default();
    let steps = cfg.steps.unwrap_or(25);
    let width = cfg.width.unwrap_or(1536);
    let height = cfg.height.unwrap_or(768);
    let guidance = cfg.guidance.unwrap_or(6.5);
    let scheduler = cfg.scheduler.unwrap_or_else(|| "dpmsolver-sde".to_string());
    let upscale = cfg.upscale.unwrap_or_else(|| "none".to_string());
    let seam_inpaint = cfg.seam_inpaint.unwrap_or(false);
    // NOTE(review): the Python side maps this alias to
    // ProGamerGov/sdxl-360-diffusion before loading.
    let model_path = cfg
        .model_path
        .unwrap_or_else(|| "proximasan/sdxl-360-diffusion".to_string());
    let base_model = cfg
        .base_model
        .unwrap_or_else(|| "stabilityai/stable-diffusion-xl-base-1.0".to_string());
    let vae_model = cfg
        .vae_model
        .unwrap_or_else(|| "madebyollin/sdxl-vae-fp16-fix".to_string());
    let active_generation_pid = state.active_generation_pid.clone();
    let cancel_generation_requested = state.cancel_generation_requested.clone();
    let window = window.clone();
    {
        // Reserve the single generation slot; Some(0) means "spawning".
        let mut active = active_generation_pid
            .lock()
            .map_err(|_| "Failed to update active generation state".to_string())?;
        if active.is_some() {
            return Err("Generation is already running".to_string());
        }
        *active = Some(0);
        cancel_generation_requested.store(false, Ordering::SeqCst);
    }
    async_runtime::spawn_blocking(move || {
        let result = (|| {
            let out_dir = output_dir(&root)?;
            let script = script_path(&root)?;
            let python = python_binary(&root);
            let mut cmd = Command::new(python);
            cmd.current_dir(&root)
                .arg(script.as_os_str())
                .arg("--prompt")
                .arg(prompt_clone)
                .arg("--output-dir")
                .arg(&out_dir)
                .arg("--work-dir")
                .arg(root.to_string_lossy().to_string())
                .arg("--upscale")
                .arg(upscale)
                .arg("--steps")
                .arg(steps.to_string())
                .arg("--guidance")
                .arg(guidance.to_string())
                .arg("--width")
                .arg(width.to_string())
                .arg("--height")
                .arg(height.to_string())
                .arg("--scheduler")
                .arg(scheduler)
                .arg("--model-path")
                .arg(model_path)
                .arg("--base-model")
                .arg(base_model)
                .arg("--vae-model")
                .arg(vae_model)
                .stdout(Stdio::piped())
                .stderr(Stdio::piped());
            if seam_inpaint {
                cmd.arg("--seam-inpaint");
            }
            let mut child = cmd
                .spawn()
                .map_err(|e| format!("Failed to start generator: {e}"))?;
            {
                // Record the real PID so cancel_generation can signal it.
                let mut active = active_generation_pid
                    .lock()
                    .map_err(|_| "Failed to update active generation state".to_string())?;
                *active = Some(child.id());
            }
            // Cancellation may have been requested while we were spawning.
            if cancel_generation_requested.load(Ordering::SeqCst) {
                let _ = child.kill();
            }
            let stdout = child
                .stdout
                .take()
                .ok_or_else(|| "Failed to capture generator stdout".to_string())?;
            let stderr = child
                .stderr
                .take()
                .ok_or_else(|| "Failed to capture generator stderr".to_string())?;
            // Consume stderr in a side thread to avoid blocking.
            let mut stderr_reader = BufReader::new(stderr);
            let stderr_handle = std::thread::spawn(move || {
                let mut buf = String::new();
                let _ = stderr_reader.read_to_string(&mut buf);
                buf
            });
            // Forward PROGRESS lines as events; the last other non-empty line
            // is remembered — the generator prints the final path last.
            let mut last_path: Option<String> = None;
            let mut reader = BufReader::new(stdout);
            let mut line = String::new();
            while reader
                .read_line(&mut line)
                .map_err(|e| format!("Failed to read generator output: {e}"))?
                > 0
            {
                let trimmed = line.trim_end().to_string();
                if let Some(json_str) = trimmed.strip_prefix("PROGRESS ") {
                    if let Ok(val) = serde_json::from_str::<Value>(json_str) {
                        let _ = window.emit("gen-progress", val);
                    }
                } else if !trimmed.is_empty() {
                    last_path = Some(trimmed.clone());
                }
                line.clear();
            }
            let status = child
                .wait()
                .map_err(|e| format!("Failed to wait for generator: {e}"))?;
            let stderr_output = stderr_handle.join().unwrap_or_default();
            if cancel_generation_requested.load(Ordering::SeqCst) {
                return Err("Generation cancelled".to_string());
            }
            if !status.success() {
                return Err(format!("Generator failed: {stderr_output}"));
            }
            let path_line =
                last_path.ok_or_else(|| "Generator did not return a path".to_string())?;
            let path = PathBuf::from(path_line.trim());
            let resolved = if path.is_absolute() {
                path
            } else {
                out_dir.join(path)
            };
            Ok(GenerateResult {
                output_path: resolved.to_string_lossy().to_string(),
            })
        })();
        // Always release the generation slot and reset the cancel flag,
        // whether the generation succeeded or failed.
        if let Ok(mut active) = active_generation_pid.lock() {
            *active = None;
        }
        cancel_generation_requested.store(false, Ordering::SeqCst);
        result
    })
    .await
    .map_err(|e| format!("Generation task failed to join: {e}"))?
}
/// Application entry point: resolve the project root, install shared state,
/// register the invoke handlers, and start the Tauri event loop.
fn main() {
    let context = tauri::generate_context!();
    let env = Env::default();
    // Try to locate the project root by walking up from cwd; if missing, fall back to Tauri resource dir.
    let root = discover_root()
        .or_else(|_| {
            tauri::api::path::resource_dir(context.package_info(), &env)
                .ok_or_else(|| "Could not locate project root or resource dir".to_string())
        })
        .unwrap_or_else(|_| PathBuf::from("."));
    let state = Paths {
        project_root: root,
        ..Default::default()
    };
    tauri::Builder::default()
        .manage(state)
        .invoke_handler(tauri::generate_handler![
            list_maps,
            delete_map,
            cancel_generation,
            generate_map
        ])
        .run(context)
        .expect("error while running tauri application");
}

38
src-tauri/tauri.conf.json Normal file
View File

@@ -0,0 +1,38 @@
{
"package": {
"productName": "SkymapGen",
"version": "0.1.0"
},
"build": {
"beforeDevCommand": "npm run dev",
"beforeBuildCommand": "npm run build",
"devPath": "http://localhost:5173",
"distDir": "../dist"
},
"tauri": {
"allowlist": {
"fs": {
"readFile": true,
"readDir": true,
"scope": ["**"]
}
},
"bundle": {
"identifier": "com.skymap.gen",
"resources": ["../generate_equirect.py", "../default.png"]
},
"windows": [
{
"label": "main",
"title": "Skymap Generator",
"fullscreen": false,
"resizable": true,
"width": 1280,
"height": 720
}
],
"security": {
"csp": null
}
}
}

695
src/main.js Normal file
View File

@@ -0,0 +1,695 @@
import './style.css';
import * as THREE from 'three';
import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js';
import { invoke } from '@tauri-apps/api/tauri';
import { readBinaryFile } from '@tauri-apps/api/fs';
import { listen } from '@tauri-apps/api/event';
const app = document.getElementById('app');
app.innerHTML = `
<div id="canvas-container"></div>
<div id="thumb-dock">
<div id="thumb-header">Maps</div>
<div id="thumb-list"></div>
</div>
<div id="settings-panel" class="hidden">
<div class="settings-header">
<div>
<div class="settings-title">Generation Settings</div>
<div class="settings-sub">Defaults tuned for SDXL 360</div>
</div>
<button id="settings-close" aria-label="Close settings">×</button>
</div>
<div class="settings-grid">
<label>Steps
<input type="number" id="steps-input" min="1" max="80" />
</label>
<label>Guidance
<input type="number" id="guidance-input" step="0.1" min="0" max="20" />
</label>
<label>Width
<input type="number" id="width-input" min="256" max="4096" step="64" />
</label>
<label>Height
<input type="number" id="height-input" min="256" max="4096" step="64" />
</label>
<label>Scheduler
<select id="scheduler-input">
<option value="dpmsolver-sde">DPM++ 2M SDE</option>
<option value="dpmsolver">DPM++ 2M</option>
<option value="euler">Euler</option>
<option value="euler_a">Euler Ancestral</option>
<option value="heun">Heun</option>
<option value="ddim">DDIM</option>
</select>
</label>
<label>Upscale
<select id="upscale-input">
<option value="none">None</option>
<option value="realesrgan">Real-ESRGAN</option>
<option value="topaz">Topaz</option>
</select>
</label>
<label class="settings-check">Seam Inpainting
<input type="checkbox" id="seam-inpaint-input" />
</label>
<label>Model Path
<input type="text" id="model-path-input" />
</label>
<label>Base Model
<input type="text" id="base-model-input" />
</label>
<label>VAE
<input type="text" id="vae-model-input" />
</label>
</div>
<div class="settings-footer">
<button id="settings-reset">Reset defaults</button>
</div>
</div>
<div id="progress-overlay">
<div id="progress-ring">
<div id="progress-fill"></div>
<div id="progress-text">...</div>
</div>
</div>
<div id="prompt-bar">
<input id="prompt-input" type="text" placeholder="Describe the environment..." />
<button id="settings-btn" title="Generation settings">Settings</button>
<button id="generate-btn">Generate</button>
</div>
<div id="status"></div>
<div id="delete-confirm" class="hidden" role="dialog" aria-modal="true" aria-labelledby="delete-confirm-title">
<div id="delete-confirm-card">
<div id="delete-confirm-title">Delete map?</div>
<div id="delete-confirm-body">This will remove <span id="delete-confirm-filename"></span> from disk.</div>
<div id="delete-confirm-actions">
<button id="delete-confirm-cancel" type="button">Cancel</button>
<button id="delete-confirm-delete" type="button">Delete</button>
</div>
</div>
</div>
`;
const canvasContainer = document.getElementById('canvas-container');
const promptInput = document.getElementById('prompt-input');
const generateBtn = document.getElementById('generate-btn');
const settingsBtn = document.getElementById('settings-btn');
const statusEl = document.getElementById('status');
const thumbList = document.getElementById('thumb-list');
const thumbDock = document.getElementById('thumb-dock');
const deleteConfirm = document.getElementById('delete-confirm');
const deleteConfirmFilename = document.getElementById('delete-confirm-filename');
const deleteConfirmCancel = document.getElementById('delete-confirm-cancel');
const deleteConfirmDelete = document.getElementById('delete-confirm-delete');
const progressOverlay = document.getElementById('progress-overlay');
const progressFill = document.getElementById('progress-fill');
const progressText = document.getElementById('progress-text');
const settingsPanel = document.getElementById('settings-panel');
const settingsClose = document.getElementById('settings-close');
const settingsReset = document.getElementById('settings-reset');
const stepsInput = document.getElementById('steps-input');
const guidanceInput = document.getElementById('guidance-input');
const widthInput = document.getElementById('width-input');
const heightInput = document.getElementById('height-input');
const schedulerInput = document.getElementById('scheduler-input');
const upscaleInput = document.getElementById('upscale-input');
const seamInpaintInput = document.getElementById('seam-inpaint-input');
const modelPathInput = document.getElementById('model-path-input');
const baseModelInput = document.getElementById('base-model-input');
const vaeModelInput = document.getElementById('vae-model-input');
// Three.js setup
let renderer, scene, camera, controls, skyMesh;
const defaultTextureUrl = '/default.png'; // bundled fallback sky shown at startup
const skyFadeDurationMs = 1000; // crossfade length when swapping skies
let skyTransition = null; // active crossfade descriptor (see transitionSkyToTexture)
let skyTextureRequestId = 0; // monotonic id used to drop stale async texture loads
let autoSpin = true; // slow idle rotation of the sky sphere
let lastInteraction = Date.now(); // timestamp of last user interaction (for idle detection)
// Custom smooth-zoom state; OrbitControls' own zoom is disabled in initScene().
const minZoomDistance = 0.2;
const maxZoomDistance = 20;
let desiredDistance = 1;
const zoomLerpFactor = 0.1; // per-frame lerp toward desiredDistance
const zoomVec = new THREE.Vector3(); // scratch vector reused every frame
// Progress bookkeeping fed by `gen-progress` events (see setupProgressEvents).
let progressState = {
  upscale: null,
  seamInpaint: null,
  phases: {
    gen: null,
    inpaint: null,
    upscale: null,
  },
};
let progressUnlisten = null; // unsubscribe handle for the progress listener
let currentMapPath = null; // path of the map currently shown as the sky
let deleteConfirmResolve = null; // pending resolver of the delete-dialog promise
let generationRunning = false;
let cancelRequested = false;
// Defaults mirrored into the settings panel; see applySettings().
const defaultSettings = {
  steps: 25,
  width: 1536,
  height: 768,
  guidance: 6.5,
  scheduler: 'dpmsolver-sde',
  upscale: 'none',
  seamInpaint: false,
  modelPath: 'proximasan/sdxl-360-diffusion',
  baseModel: 'stabilityai/stable-diffusion-xl-base-1.0',
  vaeModel: 'madebyollin/sdxl-vae-fp16-fix',
};
let currentSettings = { ...defaultSettings };
// Show a transient message in the status toast; it auto-hides after 5 seconds.
// Passing a falsy value clears and hides the toast immediately.
function setStatus(msg) {
  statusEl.textContent = msg || '';
  if (!msg) {
    statusEl.style.opacity = '0';
    return;
  }
  statusEl.style.opacity = '0.98';
  clearTimeout(setStatus._timer);
  setStatus._timer = setTimeout(() => { statusEl.style.opacity = '0'; }, 5000);
}
// Load `url` as a Three.js texture configured for equirectangular sky use.
// Resolves with the configured texture, or rejects with the loader error.
async function loadTexture(url) {
  const loader = new THREE.TextureLoader();
  return new Promise((resolve, reject) => {
    loader.load(
      url,
      tex => {
        // Configure for use as an equirectangular environment in sRGB.
        tex.mapping = THREE.EquirectangularReflectionMapping;
        tex.colorSpace = THREE.SRGBColorSpace;
        resolve(tex);
      },
      undefined, // no progress callback
      err => reject(err)
    );
  });
}
// Load a texture from `url` and crossfade the sky to it. A request id guards
// against out-of-order completion: only the latest call wins, and textures
// from superseded requests are disposed to free GPU memory.
async function setSkyFromUrl(url) {
  const requestId = ++skyTextureRequestId;
  try {
    const texture = await loadTexture(url);
    if (requestId !== skyTextureRequestId) {
      // A newer request started while we were loading: drop this result.
      texture.dispose();
      return;
    }
    transitionSkyToTexture(texture);
  } catch (e) {
    console.error('Failed to load texture', e);
    // Only surface the error if this is still the most recent request.
    if (requestId === skyTextureRequestId) {
      setStatus('Failed to load map texture');
    }
  }
}
// Swap the sky sphere's map to `texture`, disposing the previous texture's
// GPU resources (unless it is the same object).
function replaceSkyTexture(texture) {
  const old = skyMesh.material.map;
  skyMesh.material.map = texture;
  skyMesh.material.needsUpdate = true;
  if (old && old !== texture) old.dispose();
}
// Detach a crossfade overlay mesh from the scene and free its material.
function removeSkyTransition(transition) {
  const { mesh, material } = transition;
  scene.remove(mesh);
  material.dispose();
}
// Complete the active crossfade: tear down the overlay mesh and commit its
// texture to the main sky sphere. No-op unless `transition` is the active one.
function finishSkyTransition(transition) {
  if (!transition || transition !== skyTransition) return;
  removeSkyTransition(transition);
  skyTransition = null;
  replaceSkyTexture(transition.texture);
}
// Crossfade the sky to `texture` by adding a second, initially transparent
// sphere over the current one and fading it in (driven by updateSkyTransition).
function transitionSkyToTexture(texture) {
  // No current map yet: apply directly without a fade.
  if (!skyMesh.material.map) {
    replaceSkyTexture(texture);
    return;
  }
  // Only one transition at a time; finish any in-flight fade immediately.
  if (skyTransition) {
    finishSkyTransition(skyTransition);
  }
  const material = new THREE.MeshBasicMaterial({
    side: THREE.BackSide,
    color: 0xffffff,
    map: texture,
    transparent: true,
    opacity: 0, // faded in over skyFadeDurationMs
    depthWrite: false,
  });
  // Shares the sky sphere's geometry; renders after it so the fade overlays.
  const mesh = new THREE.Mesh(skyMesh.geometry, material);
  mesh.rotation.copy(skyMesh.rotation);
  mesh.renderOrder = skyMesh.renderOrder + 1;
  scene.add(mesh);
  skyTransition = {
    mesh,
    material,
    texture,
    start: performance.now(),
    duration: skyFadeDurationMs,
  };
}
// Advance the active sky crossfade; called once per frame from animate().
function updateSkyTransition(now) {
  if (!skyTransition) return;
  const progress = Math.min(1, (now - skyTransition.start) / skyTransition.duration);
  // Eased opacity ramp for the incoming sky.
  skyTransition.material.opacity = THREE.MathUtils.smoothstep(progress, 0, 1);
  // Keep the fade sphere aligned with the (possibly auto-spinning) sky.
  skyTransition.mesh.rotation.copy(skyMesh.rotation);
  if (progress >= 1) {
    finishSkyTransition(skyTransition);
  }
}
// Build the Three.js scene — renderer, camera, inward-facing sky sphere, and
// orbit controls — then start the render loop. Zoom is implemented manually
// (wheel handler + animate()) so it can be smoothed.
function initScene() {
  scene = new THREE.Scene();
  camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
  // Slight offset from the origin so OrbitControls has a valid view direction.
  camera.position.set(0.1, 0, 0.1);
  renderer = new THREE.WebGLRenderer({ antialias: true });
  // Cap pixel ratio at 2 to limit GPU cost on high-DPI displays.
  renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
  renderer.setSize(window.innerWidth, window.innerHeight);
  renderer.outputColorSpace = THREE.SRGBColorSpace;
  renderer.toneMapping = THREE.ACESFilmicToneMapping;
  canvasContainer.appendChild(renderer.domElement);
  desiredDistance = camera.position.length();
  // Sphere viewed from the inside (BackSide) onto which the map is projected.
  const geometry = new THREE.SphereGeometry(50, 64, 32);
  const material = new THREE.MeshBasicMaterial({ side: THREE.BackSide, color: 0xffffff });
  skyMesh = new THREE.Mesh(geometry, material);
  scene.add(skyMesh);
  controls = new OrbitControls(camera, renderer.domElement);
  controls.enablePan = false;
  controls.enableZoom = false; // we implement custom smooth zooming
  controls.enableDamping = true;
  controls.dampingFactor = 0.08;
  controls.rotateSpeed = 0.3;
  controls.autoRotate = false;
  controls.autoRotateSpeed = 0.15;
  controls.minDistance = 0.05;
  controls.maxDistance = 20;
  // Any user interaction pauses the idle auto-spin (resumed in animate()).
  controls.addEventListener('start', () => {
    autoSpin = false;
    lastInteraction = Date.now();
  });
  controls.addEventListener('end', () => {
    lastInteraction = Date.now();
  });
  window.addEventListener('resize', onResize);
  renderer.domElement.addEventListener(
    'wheel',
    (e) => {
      e.preventDefault();
      const delta = e.deltaY;
      // Scale desired distance exponentially for smooth feel
      const factor = Math.exp(delta * 0.0015);
      desiredDistance = THREE.MathUtils.clamp(desiredDistance * factor, minZoomDistance, maxZoomDistance);
      lastInteraction = Date.now();
      autoSpin = false;
    },
    { passive: false } // non-passive so preventDefault() is honored
  );
  animate();
}
// Keep the renderer and camera projection in sync with the window size.
function onResize() {
  if (!renderer || !camera) return;
  const width = window.innerWidth;
  const height = window.innerHeight;
  renderer.setSize(width, height);
  camera.aspect = width / height;
  camera.updateProjectionMatrix();
}
// Per-frame render loop: idle auto-spin, crossfade update, smoothed zoom.
function animate() {
  requestAnimationFrame(animate);
  const now = performance.now();
  // Resume the slow auto-rotation after 2s without user interaction.
  const idle = Date.now() - lastInteraction > 2000;
  if (idle) autoSpin = true;
  if (autoSpin) {
    skyMesh.rotation.y += 0.0008;
  }
  updateSkyTransition(now);
  // Smooth zoom toward desired distance
  const target = controls.target;
  const currentDistance = camera.position.distanceTo(target);
  const nextDistance = THREE.MathUtils.lerp(currentDistance, desiredDistance, zoomLerpFactor);
  // Reuse zoomVec to avoid per-frame allocations.
  zoomVec.copy(camera.position).sub(target).normalize().multiplyScalar(nextDistance).add(target);
  camera.position.copy(zoomVec);
  controls.update();
  renderer.render(scene, camera);
}
// Read an image file via the Tauri fs API and expose it as a blob object URL
// usable by <img> tags and the texture loader.
async function pathToObjectUrl(path) {
  const bytes = new Uint8Array(await readBinaryFile(path));
  const blob = new Blob([bytes], { type: 'image/png' });
  return URL.createObjectURL(blob);
}
// Clear all per-run progress bookkeeping before a new generation starts.
function resetProgressState() {
  progressState = {
    upscale: null,
    seamInpaint: null,
    phases: { gen: null, inpaint: null, upscale: null },
  };
}
// Combine per-phase progress into a single 0..1 value, weighting phases by
// which optional stages (seam inpainting, upscaling) are active for this run.
function computeProgress() {
  const upscaleOn = progressState.upscale === true;
  const seamOn = progressState.seamInpaint === true;
  let genWeight = (seamOn || upscaleOn) ? 0.5 : 1;
  let inpaintWeight = seamOn ? 0.5 : 0;
  const upscaleWeight = upscaleOn ? 0.5 : 0;
  // With both extras active, generation and inpainting split half the bar.
  if (seamOn && upscaleOn) {
    genWeight = 0.25;
    inpaintWeight = 0.25;
  }
  // Fraction completed for one phase, clamped to [0, 1]; 0 if not started.
  const frac = (phase) => {
    if (!phase || !phase.total) return 0;
    return Math.min(1, Math.max(0, phase.current / phase.total));
  };
  const { gen, inpaint, upscale } = progressState.phases;
  return frac(gen) * genWeight + frac(inpaint) * inpaintWeight + frac(upscale) * upscaleWeight;
}
// Paint the circular progress ring and percentage label for a 0..1 fraction.
function updateProgressDisplay(pct) {
  if (!progressOverlay) return;
  const clamped = Math.min(1, Math.max(0, pct || 0));
  const stop = clamped * 100;
  progressFill.style.background = `conic-gradient(#ffffff ${stop}%, rgba(255,255,255,0.12) ${stop}%)`;
  progressText.textContent = `${Math.round(clamped * 100)}%`;
}
// Reset progress state and show the overlay at 0%.
function startProgress() {
  resetProgressState();
  if (progressOverlay) progressOverlay.style.display = 'flex';
  updateProgressDisplay(0);
}
// Snap the ring to 100% and hide the overlay.
function stopProgress() {
  updateProgressDisplay(1);
  if (progressOverlay) progressOverlay.style.display = 'none';
}
// Re-query the backend for saved maps and rebuild the thumbnail dock. If
// `selectedPath` is given, that map becomes the shown sky; otherwise the
// first map in the list does. Hides the dock when the list is empty or
// unreadable.
async function refreshThumbnails(selectedPath) {
  let maps;
  try {
    maps = await invoke('list_maps');
  } catch (e) {
    console.error(e);
    setStatus('Failed to read map list');
    thumbDock.style.display = 'none';
    return;
  }
  thumbList.innerHTML = '';
  if (!maps || maps.length === 0) {
    thumbDock.style.display = 'none';
    return;
  }
  thumbDock.style.display = 'block';
  for (let idx = 0; idx < maps.length; idx++) {
    const item = maps[idx];
    const el = document.createElement('div');
    el.className = 'thumb-item';
    const img = document.createElement('img');
    const deleteBtn = document.createElement('button');
    deleteBtn.className = 'thumb-delete';
    deleteBtn.type = 'button';
    deleteBtn.textContent = '×';
    deleteBtn.title = 'Delete map';
    deleteBtn.setAttribute('aria-label', `Delete ${item.filename}`);
    deleteBtn.addEventListener('click', async (event) => {
      // Stop the click from also selecting the tile underneath.
      event.preventDefault();
      event.stopPropagation();
      await deleteMap(item, el);
    });
    let fileUrl;
    try {
      fileUrl = await pathToObjectUrl(item.path);
      img.src = fileUrl;
    } catch (err) {
      console.error('Failed to load thumbnail', err);
      img.alt = 'Failed to load';
    }
    el.appendChild(img);
    el.appendChild(deleteBtn);
    el.title = item.filename;
    el.addEventListener('click', () => {
      if (fileUrl) {
        currentMapPath = item.path;
        setSkyFromUrl(fileUrl);
        setStatus(`Loaded ${item.filename}`);
      }
    });
    thumbList.appendChild(el);
    if (fileUrl) {
      // Default selection: first map when no explicit selection requested.
      if (idx === 0 && !selectedPath) {
        currentMapPath = item.path;
        setSkyFromUrl(fileUrl);
        setStatus(`Showing ${item.filename}`);
      }
      // Explicit selection wins over the default.
      if (selectedPath && selectedPath === item.path) {
        currentMapPath = item.path;
        setSkyFromUrl(fileUrl);
        setStatus(`Showing ${item.filename}`);
      }
    }
  }
}
// Confirm with the user, then ask the backend to delete `item` and update the
// dock. Falls back to the default sky when the last map is removed.
async function deleteMap(item, tileEl) {
  if (!item?.path) return;
  const confirmed = await requestDeleteConfirmation(item.filename);
  if (!confirmed) return;
  try {
    await invoke('delete_map', { path: item.path });
    const wasCurrent = currentMapPath === item.path;
    tileEl?.remove();
    setStatus(`Deleted ${item.filename}`);
    if (thumbList.children.length === 0) {
      // Last map gone: hide the dock and restore the bundled default sky.
      thumbDock.style.display = 'none';
      currentMapPath = null;
      await setSkyFromUrl(defaultTextureUrl);
      return;
    }
    // If the shown map was deleted, select the first remaining thumbnail.
    if (wasCurrent) {
      thumbList.querySelector('.thumb-item')?.click();
    }
  } catch (err) {
    console.error(err);
    setStatus('Failed to delete map');
  }
}
// Open the delete-confirmation dialog for `filename` and resolve with the
// user's choice (true = delete, false = cancel). Any dialog already open is
// cancelled first so at most one promise is pending at a time.
function requestDeleteConfirmation(filename) {
  if (deleteConfirmResolve) {
    closeDeleteConfirmation(false);
  }
  // Fix: previously rendered the literal text `"$(unknown)"` instead of
  // interpolating the actual filename into the dialog body.
  deleteConfirmFilename.textContent = `"${filename}"`;
  deleteConfirm.classList.remove('hidden');
  deleteConfirmDelete.focus();
  return new Promise(resolve => {
    deleteConfirmResolve = resolve;
  });
}
// Hide the delete dialog and settle the pending confirmation promise, if any.
function closeDeleteConfirmation(result) {
  deleteConfirm.classList.add('hidden');
  const resolve = deleteConfirmResolve;
  if (resolve) {
    // Clear before resolving so a re-entrant open sees a clean state.
    deleteConfirmResolve = null;
    resolve(result);
  }
}
// Kick off a generation run with the current prompt and settings. If a run is
// already in progress the call delegates to cancelGeneration() instead.
async function generateMap() {
  if (generationRunning) {
    await cancelGeneration();
    return;
  }
  const prompt = promptInput.value.trim();
  if (!prompt) {
    setStatus('Please enter a prompt');
    return;
  }
  // Read the settings form, falling back to defaults for blank/invalid values.
  const settings = {
    steps: Number(stepsInput.value) || defaultSettings.steps,
    guidance: Number(guidanceInput.value) || defaultSettings.guidance,
    width: Number(widthInput.value) || defaultSettings.width,
    height: Number(heightInput.value) || defaultSettings.height,
    scheduler: schedulerInput.value || defaultSettings.scheduler,
    upscale: upscaleInput.value || defaultSettings.upscale,
    seamInpaint: seamInpaintInput.checked,
    modelPath: modelPathInput.value.trim() || defaultSettings.modelPath,
    baseModel: baseModelInput.value.trim() || defaultSettings.baseModel,
    vaeModel: vaeModelInput.value.trim() || defaultSettings.vaeModel,
  };
  currentSettings = settings;
  generationRunning = true;
  cancelRequested = false;
  // Button stays enabled so it can act as the Cancel control while running.
  generateBtn.disabled = false;
  generateBtn.textContent = 'Cancel';
  setStatus('Generating...');
  startProgress();
  try {
    const result = await invoke('generate_map', { prompt, settings });
    // Tolerate camelCase, snake_case, or a bare-string path from the backend.
    const outputPath = result.outputPath || result.output_path || result;
    if (outputPath) {
      const fileUrl = await pathToObjectUrl(outputPath);
      await refreshThumbnails(outputPath);
      currentMapPath = outputPath;
      await setSkyFromUrl(fileUrl);
      setStatus('New environment loaded');
    } else {
      setStatus('Generation finished, but no output path reported');
    }
  } catch (e) {
    console.error(e);
    // The backend rejects with the string 'Generation cancelled' on cancel.
    setStatus(e === 'Generation cancelled' ? 'Generation cancelled' : (typeof e === 'string' ? e : 'Generation failed'));
  } finally {
    stopProgress();
    generationRunning = false;
    cancelRequested = false;
    generateBtn.textContent = 'Generate';
    generateBtn.disabled = false;
  }
}
// Ask the backend to cancel the in-flight generation. On failure the button
// is restored so the user can retry; on success generateMap()'s catch/finally
// handles the UI reset when the run actually terminates.
async function cancelGeneration() {
  if (!generationRunning || cancelRequested) return;
  cancelRequested = true;
  generateBtn.disabled = true;
  generateBtn.textContent = 'Cancelling...';
  setStatus('Cancelling generation...');
  try {
    await invoke('cancel_generation');
  } catch (e) {
    console.error(e);
    cancelRequested = false;
    generateBtn.disabled = false;
    generateBtn.textContent = 'Cancel';
    setStatus(typeof e === 'string' ? e : 'Failed to cancel generation');
  }
}
// Wire all static UI event handlers (buttons, prompt field, dialogs).
function setupUI() {
  generateBtn.addEventListener('click', () => {
    if (generationRunning) {
      cancelGeneration();
    } else {
      generateMap();
    }
  });
  promptInput.addEventListener('keydown', (e) => {
    if (e.key === 'Enter' && !generationRunning) generateMap();
  });
  settingsBtn.addEventListener('click', () => {
    settingsPanel.classList.remove('hidden');
  });
  settingsClose.addEventListener('click', () => {
    settingsPanel.classList.add('hidden');
  });
  settingsReset.addEventListener('click', () => {
    applySettings(defaultSettings);
  });
  deleteConfirmCancel.addEventListener('click', () => {
    closeDeleteConfirmation(false);
  });
  deleteConfirmDelete.addEventListener('click', () => {
    closeDeleteConfirmation(true);
  });
  // Clicking the dimmed backdrop (not the card) cancels the delete dialog.
  deleteConfirm.addEventListener('click', (event) => {
    if (event.target === deleteConfirm) {
      closeDeleteConfirmation(false);
    }
  });
  // Escape also cancels the delete dialog when it is open.
  document.addEventListener('keydown', (event) => {
    if (event.key === 'Escape' && !deleteConfirm.classList.contains('hidden')) {
      closeDeleteConfirmation(false);
    }
  });
}
// Subscribe to `gen-progress` events emitted by the Rust side and fold each
// payload into progressState before repainting the ring.
async function setupProgressEvents() {
  try {
    progressUnlisten = await listen('gen-progress', (event) => {
      const data = event?.payload || {};
      // Flags announcing which optional stages this run includes.
      if (typeof data.upscale === 'boolean') {
        progressState.upscale = data.upscale;
      }
      if (typeof data.seamInpaint === 'boolean') {
        progressState.seamInpaint = data.seamInpaint;
      }
      // Per-phase step counters: { phase, current, total }.
      if (data.phase && typeof data.current === 'number' && typeof data.total === 'number') {
        progressState.phases[data.phase] = {
          current: data.current,
          total: data.total,
        };
      }
      updateProgressDisplay(computeProgress());
    });
  } catch (err) {
    console.error('Failed to bind progress listener', err);
  }
}
// App entry point: build the scene, wire the UI, then load initial content
// (default sky first, then any previously generated maps).
async function bootstrap() {
  initScene();
  setupUI();
  applySettings(defaultSettings);
  await setupProgressEvents();
  await setSkyFromUrl(defaultTextureUrl);
  await refreshThumbnails();
  setStatus('Ready');
}
// Push a settings object into the form controls and remember it as current.
function applySettings(cfg) {
  currentSettings = { ...cfg };
  const valueTargets = [
    [stepsInput, cfg.steps],
    [guidanceInput, cfg.guidance],
    [widthInput, cfg.width],
    [heightInput, cfg.height],
    [schedulerInput, cfg.scheduler],
    [upscaleInput, cfg.upscale],
    [modelPathInput, cfg.modelPath],
    [baseModelInput, cfg.baseModel],
    [vaeModelInput, cfg.vaeModel],
  ];
  for (const [input, value] of valueTargets) {
    input.value = value;
  }
  seamInpaintInput.checked = Boolean(cfg.seamInpaint);
}
bootstrap();

493
src/style.css Normal file
View File

@@ -0,0 +1,493 @@
:root {
--overlay-bg: rgba(255, 255, 255, 0.14);
--text-color: #f7f7f7;
--dock-bg: rgba(18, 18, 24, 0.78);
--dock-hover: rgba(34, 34, 46, 0.92);
--input-bg: rgba(245, 245, 245, 0.82);
--input-text: #1f2933;
--border: rgba(255, 255, 255, 0.22);
--glass-bg: rgba(255,255,255,0.12);
--glass-bg-strong: rgba(255,255,255,0.18);
--glass-border: rgba(255,255,255,0.3);
--glass-shadow: 0 20px 50px rgba(0,0,0,0.35);
}
* { box-sizing: border-box; }
body, html, #app {
margin: 0;
padding: 0;
width: 100%;
height: 100%;
overflow: hidden;
background: #0b0c10;
color: var(--text-color);
font-family: "Inter", "SF Pro Display", "Segoe UI", system-ui, -apple-system, sans-serif;
}
button,
input,
select,
#status,
#thumb-dock,
#settings-panel,
#delete-confirm-card,
#progress-text {
text-shadow: 0 1px 3px rgba(0,0,0,0.72), 0 0 12px rgba(0,0,0,0.34);
}
input::placeholder {
text-shadow: 0 1px 3px rgba(0,0,0,0.5);
}
#canvas-container {
width: 100%;
height: 100%;
position: fixed;
inset: 0;
overflow: hidden;
}
#prompt-bar {
position: fixed;
bottom: 30px;
left: 50%;
transform: translateX(-50%);
display: flex;
gap: 10px;
align-items: center;
padding: 12px 14px;
background: rgba(255,255,255,0.12);
border: 1px solid var(--border);
border-radius: 12px;
backdrop-filter: blur(10px);
box-shadow: 0 20px 50px rgba(0,0,0,0.35);
}
#prompt-input {
width: min(48vw, 720px);
min-width: 220px;
border: 1px solid rgba(255,255,255,0.3);
padding: 12px 14px;
border-radius: 12px;
background: rgba(255,255,255,0.16);
color: #fdfefe;
font-size: 16px;
outline: none;
box-shadow: inset 0 1px 0 rgba(255,255,255,0.35), 0 4px 14px rgba(0,0,0,0.18);
backdrop-filter: blur(6px);
}
#prompt-input:focus {
border-color: rgba(255,255,255,0.5);
box-shadow: inset 0 1px 0 rgba(255,255,255,0.45), 0 6px 18px rgba(0,0,0,0.18);
}
#generate-btn {
border: none;
padding: 12px 18px;
border-radius: 10px;
background: rgba(255,255,255,0.18);
color: #f5f6fb;
font-weight: 700;
font-size: 15px;
cursor: pointer;
transition: transform 0.12s ease, box-shadow 0.12s ease, opacity 0.2s ease;
box-shadow: 0 8px 18px rgba(0,0,0,0.14), inset 0 1px 0 rgba(255,255,255,0.35);
backdrop-filter: blur(6px);
}
#generate-btn:disabled {
opacity: 0.5;
cursor: not-allowed;
box-shadow: none;
}
#generate-btn:not(:disabled):hover { transform: translateY(-1px); }
#generate-btn:not(:disabled):active { transform: translateY(0); }
#settings-btn {
border: 1px solid rgba(255,255,255,0.32);
padding: 12px 14px;
border-radius: 10px;
background: rgba(255,255,255,0.08);
color: #f5f6fb;
font-weight: 600;
font-size: 14px;
cursor: pointer;
transition: transform 0.12s ease, box-shadow 0.12s ease, opacity 0.2s ease;
box-shadow: 0 6px 14px rgba(0,0,0,0.16), inset 0 1px 0 rgba(255,255,255,0.2);
backdrop-filter: blur(6px);
}
#settings-btn:hover { transform: translateY(-1px); }
#settings-btn:active { transform: translateY(0); }
#settings-panel {
position: fixed;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
min-width: 420px;
max-width: 680px;
background: var(--glass-bg);
border: 1px solid var(--border);
border-radius: 12px;
box-shadow: var(--glass-shadow);
padding: 18px 18px 14px;
backdrop-filter: blur(18px);
z-index: 6;
}
#settings-panel.hidden { display: none; }
.settings-header {
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
margin-bottom: 12px;
}
.settings-title {
font-weight: 700;
font-size: 16px;
letter-spacing: 0.1px;
}
.settings-sub {
font-size: 12px;
color: rgba(255,255,255,0.7);
}
#settings-close {
border: 1px solid var(--glass-border);
background: rgba(255,255,255,0.1);
color: #fff;
width: 32px;
height: 32px;
border-radius: 10px;
cursor: pointer;
font-size: 18px;
line-height: 1;
box-shadow: 0 6px 14px rgba(0,0,0,0.16), inset 0 1px 0 rgba(255,255,255,0.22);
backdrop-filter: blur(6px);
}
.settings-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(160px, 1fr));
gap: 10px 12px;
}
.settings-grid label {
display: flex;
flex-direction: column;
gap: 6px;
font-size: 13px;
color: rgba(255,255,255,0.9);
}
.settings-grid input,
.settings-grid select {
width: 100%;
border-radius: 10px;
border: 1px solid var(--glass-border);
background: rgba(255,255,255,0.14);
color: #f9fafc;
padding: 10px 12px;
font-size: 14px;
outline: none;
box-shadow: inset 0 1px 0 rgba(255,255,255,0.22);
backdrop-filter: blur(6px);
}
.settings-grid .settings-check {
flex-direction: row;
align-items: center;
justify-content: space-between;
min-height: 43px;
padding: 10px 12px;
border-radius: 10px;
border: 1px solid var(--glass-border);
background: rgba(255,255,255,0.1);
box-shadow: inset 0 1px 0 rgba(255,255,255,0.18);
backdrop-filter: blur(6px);
}
.settings-grid .settings-check input {
width: 18px;
height: 18px;
padding: 0;
margin: 0;
accent-color: rgba(255,255,255,0.9);
cursor: pointer;
}
.settings-grid input:focus,
.settings-grid select:focus {
border-color: rgba(255,255,255,0.52);
box-shadow: inset 0 1px 0 rgba(255,255,255,0.32), 0 0 0 3px rgba(255,255,255,0.12);
}
.settings-footer {
margin-top: 12px;
display: flex;
justify-content: flex-end;
}
#settings-reset {
border: 1px solid var(--glass-border);
padding: 10px 14px;
border-radius: 10px;
background: rgba(255,255,255,0.1);
color: #fff;
cursor: pointer;
box-shadow: 0 6px 14px rgba(0,0,0,0.16), inset 0 1px 0 rgba(255,255,255,0.2);
backdrop-filter: blur(6px);
}
#status {
position: fixed;
top: 18px;
right: 18px;
padding: 10px 14px;
background: rgba(0,0,0,0.5);
border-radius: 10px;
border: 1px solid var(--border);
font-size: 13px;
min-width: 200px;
text-align: center;
opacity: 0;
transition: opacity 0.25s ease;
}
#delete-confirm {
position: fixed;
inset: 0;
z-index: 20;
display: flex;
align-items: center;
justify-content: center;
background: rgba(255,255,255,0.04);
backdrop-filter: blur(10px);
pointer-events: auto;
}
#delete-confirm.hidden { display: none; }
#delete-confirm-card {
width: min(360px, calc(100vw - 36px));
border-radius: 12px;
border: 1px solid var(--border);
background: var(--glass-bg);
box-shadow: var(--glass-shadow);
padding: 18px;
color: #fff;
backdrop-filter: blur(18px);
}
#delete-confirm-title {
font-size: 17px;
font-weight: 700;
margin-bottom: 8px;
}
#delete-confirm-body {
color: rgba(255,255,255,0.76);
font-size: 13px;
line-height: 1.4;
overflow-wrap: anywhere;
}
#delete-confirm-filename {
color: #fff;
}
#delete-confirm-actions {
display: flex;
justify-content: flex-end;
gap: 10px;
margin-top: 18px;
}
#delete-confirm-actions button {
border-radius: 10px;
border: 1px solid var(--glass-border);
padding: 9px 13px;
color: #fff;
cursor: pointer;
font-weight: 600;
box-shadow: 0 6px 14px rgba(0,0,0,0.16), inset 0 1px 0 rgba(255,255,255,0.2);
backdrop-filter: blur(6px);
}
#delete-confirm-cancel {
background: rgba(255,255,255,0.08);
}
#delete-confirm-delete {
background: var(--glass-bg-strong);
}
#delete-confirm-actions button:focus-visible {
outline: 2px solid rgba(255,255,255,0.86);
outline-offset: 2px;
}
/* Progress overlay */
#progress-overlay {
position: fixed;
inset: 0;
display: none;
justify-content: center;
align-items: center;
pointer-events: none;
z-index: 5;
}
#progress-ring {
width: 90vmin;
height: 90vmin;
border-radius: 50%;
position: relative;
display: flex;
align-items: center;
justify-content: center;
background: transparent;
}
#progress-fill {
  position: absolute;
  inset: 0;
  border-radius: 50%;
  background: conic-gradient(#ffffff 0%, rgba(255,255,255,0.08) 0%);
  /* Punch out the center so only a thin outer ring (~1% of the radius) stays
     visible: transparent up to 99%, opaque mask from 99% outward. The previous
     `black 50%` stop was out of order and only rendered correctly because CSS
     clamps non-increasing gradient stops to the preceding position. */
  mask: radial-gradient(farthest-side, transparent 99%, black 99%);
}
#progress-text {
position: relative;
color: #f5f6fb;
font-size: 24px;
letter-spacing: 0.5px;
text-shadow: 0 1px 8px rgba(0,0,0,0.5);
}
#thumb-dock {
display: block;
position: fixed;
top: 50%;
transform: translate(0, -50%);
left: 0;
width: 120px;
max-height: 80vh;
background: rgba(255,255,255,0.1);
border-right: 1px solid rgba(255,255,255,0.22);
border-radius: 0 12px 12px 0;
box-shadow: 0 12px 40px rgba(0,0,0,0.45);
overflow: hidden;
pointer-events: auto;
backdrop-filter: blur(10px);
}
#thumb-dock:hover { background: rgba(255,255,255,0.14); }
#thumb-header {
padding: 10px 12px;
font-size: 12px;
letter-spacing: 0.2px;
text-transform: uppercase;
color: #d5d6e0;
border-bottom: 1px solid var(--border);
}
#thumb-list {
overflow-y: auto;
max-height: calc(80vh - 44px);
padding: 8px 10px;
display: grid;
gap: 10px;
}
/* Custom scrollbar for dock */
#thumb-list::-webkit-scrollbar {
width: 10px;
}
#thumb-list::-webkit-scrollbar-track {
background: rgba(255,255,255,0.08);
border-radius: 999px;
}
#thumb-list::-webkit-scrollbar-thumb {
background: rgba(255,255,255,0.22);
border-radius: 999px;
border: 2px solid rgba(255,255,255,0.08);
}
#thumb-list::-webkit-scrollbar-thumb:hover {
background: rgba(255,255,255,0.3);
}
.thumb-item {
position: relative;
width: 100%;
max-width: 150px;
height: 150px;
border-radius: 10px;
overflow: visible;
border: 1px solid rgba(255,255,255,0.18);
box-shadow: 0 6px 20px rgba(0,0,0,0.2);
cursor: pointer;
transition: transform 0.12s ease, box-shadow 0.15s ease, border-color 0.12s ease;
background: #15171f;
}
.thumb-delete {
position: absolute;
top: -6px;
right: -6px;
width: 24px;
height: 24px;
border: 1px solid var(--glass-border);
border-radius: 50%;
background: rgba(255,255,255,0.16);
color: #fff;
display: grid;
place-items: center;
font-size: 18px;
line-height: 1;
cursor: pointer;
box-shadow: 0 6px 14px rgba(0,0,0,0.16), inset 0 1px 0 rgba(255,255,255,0.25);
backdrop-filter: blur(6px);
}
.thumb-delete:focus-visible {
outline: 1px solid rgba(255,255,255,0.9);
outline-offset: -2px;
}
.thumb-item img {
width: 100%;
height: 100%;
object-fit: cover;
display: block;
border-radius: 10px;
}
.thumb-item:hover {
transform: translateY(-1px) scale(1.01);
box-shadow: 0 10px 26px rgba(0,0,0,0.26);
border-color: rgba(255,255,255,0.34);
}
@media (max-width: 800px) {
#thumb-dock { display: none; }
#prompt-bar {
flex-direction: column;
bottom: 18px;
align-items: stretch;
}
#prompt-input { width: 78vw; }
#generate-btn { width: 100%; }
#settings-btn { width: 100%; }
#status { bottom: 120px; }
}

14
vite.config.js Normal file
View File

@@ -0,0 +1,14 @@
import { defineConfig } from 'vite';

// Vite configuration for the Tauri frontend. The dev port must match the
// `devPath` URL configured in src-tauri/tauri.conf.json.
export default defineConfig({
  clearScreen: false, // keep Tauri's own CLI output visible
  server: {
    port: 5173,
    strictPort: true, // fail fast instead of silently picking another port
  },
  // Expose TAURI_* env vars to the frontend build in addition to VITE_*.
  envPrefix: ['VITE_', 'TAURI_'],
  build: {
    target: ['es2021', 'chrome100', 'safari13'],
    outDir: 'dist'
  }
});