@inproceedings{10494187,
  author        = {Mandl, D. and Mori, S. and Mohr, P. and Peng, Y. and Langlotz, T. and Schmalstieg, D. and Kalkofen, D.},
  title         = {{Neural Bokeh}: Learning Lens Blur for Computational Videography and Out-of-Focus {Mixed Reality}},
  booktitle     = {2024 {IEEE} Conference on Virtual Reality and 3D User Interfaces ({VR})},
  year          = {2024},
  month         = mar,
  pages         = {870--880},
  doi           = {10.1109/VR58804.2024.00106},
  publisher     = {IEEE Computer Society},
  address       = {Los Alamitos, CA, USA},
  abstract      = {We present Neural Bokeh, a deep learning approach for synthesizing convincing out-of-focus effects with applications in Mixed Reality (MR) image and video compositing. Unlike existing approaches that solely learn the amount of blur for out-of-focus areas, our approach captures the overall characteristic of the bokeh to enable the seamless integration of rendered scene content into real images, ensuring a consistent lens blur over the resulting MR composition. Our method learns spatially varying blur shapes, i.e., bokeh, from a dataset of real images acquired using the physical camera that is used to capture the photograph or video of the MR composition. Accordingly, those learned blur shapes mimic the characteristics of the physical lens. As the run-time and the resulting quality of Neural Bokeh increase with the resolution of input images, we employ low-resolution images for the MR view finding at runtime and high-resolution renderings for compositing with high-resolution photographs or videos in an offline process. We envision a variety of applications, including visual enhancement of image and video compositing containing creative utilization of out-of-focus effects.},
  keywords      = {visualization;three-dimensional displays;runtime;image resolution;shape;mixed reality;virtual reality},
  internal-note = {Author names are publisher-export initials; expand to full given names (e.g. via DBLP) when confirmed. Dropped empty volume/issn and the redundant DOI-resolver url.},
}