% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

% Conference paper: Li et al., "Reality Fusion", IROS 2024 (DESY PUBDB export).
% Cleaned up from the auto-export: page-range dashes, month macro, whole-word
% brace protection in the title, and a proper proceedings title in booktitle.
% The original library-catalog dump is preserved verbatim in the ignored
% "comment" field; repository-specific fields (reportid, cin, cid, pnm, pid,
% experiment, typ) are kept — BibTeX ignores unknown field names.
@inproceedings{Li:632217,
      author       = {Li, Ke and Bacher, Reinhard and Schmidt, Susanne and
                      Leemans, Wim and Steinicke, Frank},
      title        = {{Reality Fusion}: Robust Real-time Immersive Mobile
                      Robot Teleoperation with Volumetric Visual Data Fusion},
      booktitle    = {2024 {IEEE/RSJ} International Conference on Intelligent
                      Robots and Systems ({IROS})},
      address      = {Piscataway, NJ},
      publisher    = {IEEE},
      reportid     = {PUBDB-2025-02145},
      isbn         = {979-8-3503-7770-5},
      pages        = {8982--8989},
      year         = {2024},
      month        = oct,
      date         = {2024-10-14},
      note         = {Literaturangaben;},
      comment      = {[Ebook] 2024 IEEE/RSJ International Conference on
                      Intelligent Robots and Systems / IROS'24, Abu Dhabi, 14-18
                      October 2024 ; partners: IEEE, RSJ [und drei weitere] ,
                      [Piscataway, NJ] : IEEE, 2024,},
      abstract     = {We introduce Reality Fusion, a novel robot teleoperation
                      system that localizes, streams, projects, and merges a
                      typical onboard depth sensor with a photorealistic, high
                      resolution, high framerate, and wide FoV rendering of the
                      complex remote environment represented as 3D Gaussian splats
                      (3DGS). Our framework enables robust egocentric and
                      exocentric robot teleoperation in immersive VR, with the
                      3DGS effectively extending spatial information of a depth
                      sensor with limited FoV and balancing the trade-off between
                      data streaming costs and data visual quality. We evaluated
                      our framework through a user study with 24 participants,
                      which revealed that Reality Fusion leads to significantly
                      better user performance, situation awareness, and user
                      preferences. To support further research and development, we
                      provide an open-source implementation with an
                      easy-to-replicate custom-made telepresence robot, a
                      high-performance virtual reality 3DGS renderer, and an
                      immersive robot control package.},
      organization = {2024 IEEE/RSJ International Conference on Intelligent
                      Robots and Systems, Abu Dhabi (United Arab Emirates),
                      14 Oct 2024 - 18 Oct 2024},
      cin          = {M},
      cid          = {I:(DE-H253)M-20120731},
      pnm          = {621 - Accelerator Research and Development (POF4-621)},
      pid          = {G:(DE-HGF)POF4-621},
      experiment   = {EXP:(DE-MLZ)NOSPEC-20140101},
      typ          = {PUB:(DE-HGF)8 / PUB:(DE-HGF)7},
      doi          = {10.1109/IROS58592.2024.10802431},
      url          = {https://bib-pubdb1.desy.de/record/632217},
}