@inproceedings{5a300d1ff14645928d8203dd8a094496,
title = "StyLitGAN: Image-Based Relighting via Latent Control",
abstract = "We describe a novel method, StyLitGAN, for relighting and resurfacing images in the absence of labeled data. StyLitGAN generates images with realistic lighting effects, including cast shadows, soft shadows, inter-reflections, and glossy effects, without the need for paired or CGI data. StyLitGAN uses an intrinsic image method to decompose an image, followed by a search of the latent space of a pretrained StyleGAN to identify a set of directions. By prompting the model to fix one component (e.g., albedo) and vary another (e.g., shading), we generate relighted images by adding the identified directions to the latent style codes. Quantitative metrics of change in albedo and lighting diversity allow us to choose effective directions using a forward selection process. Qualitative evaluation confirms the effectiveness of our method.",
keywords = "Generative Models, Illumination, Image Decomposition, Relighting, StyleGAN",
author = "Bhattad, Anand and Soole, James and Forsyth, {D. A.}",
note = "Publisher Copyright: {\textcopyright} 2024 IEEE; 2024 IEEE/CVF Conference on Computer Vision and Pattern Recognition, CVPR 2024; Conference date: 16-06-2024 through 22-06-2024",
year = "2024",
doi = "10.1109/CVPR52733.2024.00405",
language = "English (US)",
series = "Proceedings of the IEEE Computer Society Conference on Computer Vision and Pattern Recognition",
publisher = "IEEE Computer Society",
pages = "4231--4240",
booktitle = "Proceedings - 2024 IEEE/CVF Conference on Computer Vision and Pattern Recognition, CVPR 2024",
address = "United States",
}