Melissa Steininger, M. Sc.
Personalized Digital Health and Telemedicine
Affiliation:
Department for Epileptology
University Hospital Bonn
Medical Faculty
University of Bonn
Location:
Venusberg-Campus 1,
Building 74, Room 2G-016
53127 Bonn, Germany
Telephone: +49-228/287-52171
Email: melissa.steininger@ukbonn.de
Short CV:
Melissa Steininger earned her Bachelor’s degree in Cognitive Science (B.Sc.) in 2020 from the University of Osnabrück and her Master’s degree in Visual Computing and Games Technology (M.Sc.) in 2023 from the University of Applied Sciences Bonn-Rhein-Sieg. She is now doing her Ph.D. in Computer Science at the University Hospital Bonn/University of Bonn.
Publications
2025
Haaga, Lisa; Jansen, Anna; Steininger, Melissa; Müllers, Johannes; Bausch, Marcel; Jordan, Arthur; Surges, Rainer; Krüger, Björn
EpiEye – Einfluss anfallssupressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{haaga2025a,
  author    = {Lisa Haaga and Anna Jansen and Melissa Steininger and Johannes Müllers and Marcel Bausch and Arthur Jordan and Rainer Surges and Björn Krüger},
  title     = {EpiEye – Einfluss anfallssupressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien},
  booktitle = {Dreiländertagung Epilepsie 2025},
  year      = {2025},
  date      = {2025-03-26},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Steininger, Melissa; Jansen, Anna; Welle, Kristian; Krüger, Björn
Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR Conference Forthcoming
2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR), Forthcoming.
@conference{steininger2025b,
  author    = {Melissa Steininger and Anna Jansen and Kristian Welle and Björn Krüger},
  title     = {Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR},
  booktitle = {2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR)},
  year      = {2025},
  date      = {2025-03-12},
  urldate   = {2025-03-12},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {conference}
}
Steininger, Melissa; Jansen, Anna; Mustafa, Sarah Al-Haj; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Eye-Tracking Reveals Search Behaviour in Epilepsy Patients Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{steininger2025a,
  title     = {Eye-Tracking Reveals Search Behaviour in Epilepsy Patients},
  author    = {Melissa Steininger and Anna Jansen and Sarah Al-Haj Mustafa and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  year      = {2025},
  date      = {2025-03-03},
  urldate   = {2025-03-03},
  booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Jansen, Anna; Steininger, Melissa; Mustafa, Sarah Al-Haj; Müllers, Johannes; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Prediction Models on Eye Tracking Data in Epilepsy Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{jansen2025a,
  author    = {Anna Jansen and Melissa Steininger and Sarah Al-Haj Mustafa and Johannes Müllers and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  title     = {Prediction Models on Eye Tracking Data in Epilepsy},
  booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
  year      = {2025},
  date      = {2025-03-03},
  urldate   = {2025-03-03},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
2024
Otsubo, Hiromu; Lehnort, Marvin; Steininger, Melissa; Marquardt, Alexander; Dollack, Felix; Hirao, Yutaro; Perusquía-Hernández, Monica; Uchiyama, Hideaki; Kruijff, Ernst; Riecke, Bernhard; Kiyokawa, Kiyoshi
First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality Proceedings Article
In: ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction, pp. 439–448, 2024.
@inproceedings{Otsubo2024,
  title     = {First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality},
  author    = {Hiromu Otsubo and Marvin Lehnort and Melissa Steininger and Alexander Marquardt and Felix Dollack and Yutaro Hirao and Monica Perusquía-Hernández and Hideaki Uchiyama and Ernst Kruijff and Bernhard Riecke and Kiyoshi Kiyokawa},
  doi       = {10.1145/3678957.3685753},
  year      = {2024},
  date      = {2024-11-04},
  urldate   = {2024-11-04},
  booktitle = {ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction},
  pages     = {439--448},
  abstract  = {Awe is a complex emotion described as a perception of vastness and a need for accommodation to integrate new, overwhelming experiences. Virtual Reality (VR) has recently gained attention as a convenient means to facilitate experiences of awe. In VR, a first-person perspective might increase awe due to its immersive nature, while a third-person perspective might enhance the perception of vastness. However, the impact of VR perspectives on experiencing awe has not been thoroughly examined. We created two types of VR scenes: one with elements designed to induce high awe, such as a snowy mountain, and a low awe scene without such elements. We compared first-person and third-person perspectives in each scene. Forty-two participants explored the VR scenes, with their physiological responses captured by electrocardiogram (ECG) and face tracking (FT). Subsequently, participants self-reported their experience of awe (AWE-S) and presence (IPQ) within VR. The results revealed that the first-person perspective induced stronger feelings of awe and presence than the third-person perspective. The findings of this study provide useful guidelines for designing VR content that enhances emotional experiences.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Marquardt, Alexander; Lehnort, Marvin; Otsubo, Hiromu; Perusquia-Hernandez, Monica; Steininger, Melissa; Dollack, Felix; Uchiyama, Hideaki; Kiyokawa, Kiyoshi; Kruijff, Ernst
Exploring Gesture Interaction in Underwater Virtual Reality Proceedings Article
In: Proceedings of the 2024 ACM Symposium on Spatial User Interaction, pp. 1-2, 2024.
@inproceedings{marquardtLehnort2024,
  title     = {Exploring Gesture Interaction in Underwater Virtual Reality},
  author    = {Alexander Marquardt and Marvin Lehnort and Hiromu Otsubo and Monica Perusquia-Hernandez and Melissa Steininger and Felix Dollack and Hideaki Uchiyama and Kiyoshi Kiyokawa and Ernst Kruijff},
  doi       = {10.1145/3677386.3688890},
  year      = {2024},
  date      = {2024-10-07},
  urldate   = {2024-10-07},
  booktitle = {Proceedings of the 2024 ACM Symposium on Spatial User Interaction},
  pages     = {1--2},
  abstract  = {An underwater virtual reality (UVR) system with gesture-based controls was developed to facilitate navigation and interaction while submerged. The system uses a waterproof head-mounted display and camera-based gesture recognition, originally trained for abovewater conditions, employing three gestures: grab for navigation, pinch for single interactions, and point for continuous interactions. In an experimental study, we tested gesture recognition both above and underwater, and evaluated participant interaction within an immersive underwater scene. Results showed that underwater conditions slightly affected gesture accuracy, but the system maintained high performance. Participants reported a strong sense of presence and found the gestures intuitive while highlighting the need for further refinement to address usability challenges.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Steininger, Melissa; Perusquía-Hernández, Monica; Marquardt, Alexander; Otsubo, Hiromu; Lehnort, Marvin; Dollack, Felix; Kiyokawa, Kiyoshi; Kruijff, Ernst; Riecke, Bernhard
Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality Conference
12th International Conference on Affective Computing and Intelligent Interaction, 2024.
@conference{steininger2024,
  title     = {Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality},
  author    = {Melissa Steininger and Monica Perusquía-Hernández and Alexander Marquardt and Hiromu Otsubo and Marvin Lehnort and Felix Dollack and Kiyoshi Kiyokawa and Ernst Kruijff and Bernhard Riecke},
  year      = {2024},
  date      = {2024-09-17},
  urldate   = {2024-09-17},
  booktitle = {12th International Conference on Affective Computing and Intelligent Interaction},
  abstract  = {Awe is an emotion characterized by the perception of vastness and the need to accommodate this vastness into one’s mental framework. We propose an elicitation scene to induce awe in Virtual Reality (VR), validate it through self-report, and explore the feasibility of using skin conductance to predict self-reported awe and vastness as labeled from the stimuli in VR. Sixty-two participants took part in the study comparing the awe-eliciting space scene and a neutral scene. The space scene was confirmed as more awe-eliciting. A k-nearest neighbor algorithm confirmed high and low-awe score clusters used to label the data. A Random Forest algorithm achieved 65% accuracy (F1 = 0.56, AUC = 0.73) when predicting the self-reported low and high awe categories from continuous skin conductance data. A similar approach achieved 55% accuracy (F1 = 0.59, AUC = 0.56) when predicting the perception of vastness. These results underscore the potential of skin-conductance-based algorithms to predict awe.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Marquardt, Alexander; Steininger, Melissa; Trepkowski, Christina; Weier, Martin; Kruijff, Ernst
Selection Performance and Reliability of Eye and Head Gaze Tracking Under Varying Light Conditions Conference
2024 IEEE Conference Virtual Reality and 3D User Interfaces (VR), 2024.
@conference{marquardt2024,
  author    = {Alexander Marquardt and Melissa Steininger and Christina Trepkowski and Martin Weier and Ernst Kruijff},
  title     = {Selection Performance and Reliability of Eye and Head Gaze Tracking Under Varying Light Conditions},
  booktitle = {2024 IEEE Conference Virtual Reality and 3D User Interfaces (VR)},
  year      = {2024},
  date      = {2024-04-15},
  urldate   = {2024-04-15},
  doi       = {10.1109/VR58804.2024.00075},
  abstract  = {Augmented Reality (AR) applications increasingly rely on eye and head gaze tracking for user interaction, with their efficacy influenced by environmental factors such as spatial arrangements and lighting conditions. This paper presents two studies that examine how these variables affect the performance of eye and head gaze tracking in AR environments. While eye tracking partially delivered faster results, its performance exhibited greater variability, especially under dynamic lighting conditions. Conversely, head gaze tracking, while providing more consistent results, showed a notable reduction in accuracy in environments with fluctuating light levels. Furthermore, the spatial properties of the environment had notable implications on both tracking methods. Our research demonstrates that both spatial properties and lighting conditions are key determinants in the choice of a tracking method, underscoring the need for AR systems that can dynamically adapt to these environmental variables.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}