% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@inproceedings{Rolff:599818,
  author       = {Rolff, Tim and Schmidt, Susanne and Li, Ke and Steinicke,
                  Frank and Frintrop, Simone},
  title        = {{VRS-NeRF}: Accelerating {Neural Radiance Field} Rendering
                  with {Variable Rate Shading}},
  booktitle    = {2023 {IEEE} International Symposium on Mixed and Augmented
                  Reality ({ISMAR})},
  publisher    = {IEEE},
  year         = {2023},
  month        = oct,
  pages        = {243--252},
  doi          = {10.1109/ISMAR59233.2023.00039},
  isbn         = {979-8-3503-2838-7},
  url          = {https://bib-pubdb1.desy.de/record/599818},
  abstract     = {Recent advancements in Neural Radiance Fields (NeRF)
                  provide enormous potential for a wide range of Mixed Reality
                  (MR) applications. However, the applicability of NeRF to
                  real-time MR systems is still largely limited by the
                  rendering performance of NeRF. In this paper, we present a
                  novel approach for Variable Rate Shading for Neural Radiance
                  Fields (VRS-NeRF). In contrast to previous techniques, our
                  approach does not require training multiple neural networks
                  or re-training of already existing ones, but instead
                  utilizes the raytracing properties of NeRF. This is achieved
                  by merging rays depending on a variable shading rate, which
                  reduces the overall number of queries to the neural network.
                  We demonstrate the generalizability of our approach by
                  implementing three alternative functions for the
                  determination of the shading rate. The first method uses the
                  gaze of users to effectively implement a foveated rendering
                  technique in NeRF. For the other two techniques, we utilize
                  shading rates based on edges and saliency. Based on a
                  psychophysical experiment and multiple image-based metrics,
                  we suggest a set of parameters for each technique, yielding
                  an optimal tradeoff between rendering performance gain and
                  perceived visual quality.},
  reportid     = {PUBDB-2023-07544},
  date         = {2023-10-16},
  organization = {2023 IEEE International Symposium on
                  Mixed and Augmented Reality, Sydney
                  (Australia), 16 Oct 2023 - 20 Oct 2023},
  cin          = {MCS},
  cid          = {I:(DE-H253)MCS-20120806},
  pnm          = {621 - Accelerator Research and Development (POF4-621) /
                  HIDSS-0002 - DASHH: Data Science in Hamburg - Helmholtz
                  Graduate School for the Structure of Matter
                  $(2019_IVF-HIDSS-0002)$},
  pid          = {G:(DE-HGF)POF4-621 / $G:(DE-HGF)2019_IVF-HIDSS-0002$},
  experiment   = {EXP:(DE-MLZ)NOSPEC-20140101},
  typ          = {PUB:(DE-HGF)8 / PUB:(DE-HGF)7},
}