2025
Jansen, Anna; Morev, Nikita; Steininger, Melissa; Müllers, Johannes; Krüger, Björn
Synthetic Hand Dataset Generation: Multi-View Rendering and Annotation with Blender Conference Forthcoming
Proceedings IEEE International Symposium on Mixed and Augmented Reality (ISMAR), Forthcoming.
@conference{jansen2025c,
  title     = {Synthetic Hand Dataset Generation: Multi-View Rendering and Annotation with Blender},
  author    = {Anna Jansen and Nikita Morev and Melissa Steininger and Johannes Müllers and Björn Krüger},
  year      = {2025},
  date      = {2025-10-06},
  booktitle = {Proceedings IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
  abstract  = {Pose estimation is a common method for precise handtracking, which is important for natural interaction in virtual reality (VR). However, training those models requires large-scale datasets with accurate 3D annotations. Those are difficult to obtain due to the time-consuming data collection and the limited variety in captured scenarios. We present a work-in-progress Blender-based pipeline for generating synthetic multi-view hand datasets. Our system simulates Ultraleap Stereo IR 170-style images and extracts joint positions directly from a rigged hand model, eliminating the need for manual labeling or external tracking processes. The current pipeline version supports randomized static poses with per-frame annotations of joint positions, camera parameters, and rendered images. While extended hand variation, animation features, and different sensor-type simulations are still in progress, our pipeline already provides a flexible foundation for customizable dataset generation and reproducible hand-tracking model training.},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {conference},
}
Pose estimation is a common method for precise handtracking, which is important for natural interaction in virtual reality (VR). However, training those models requires large-scale datasets with accurate 3D annotations. Those are difficult to obtain due to the time-consuming data collection and the limited variety in captured scenarios. We present a work-in-progress Blender-based pipeline for generating synthetic multi-view hand datasets. Our system simulates Ultraleap Stereo IR 170-style images and extracts joint positions directly from a rigged hand model, eliminating the need for manual labeling or external tracking processes. The current pipeline version supports randomized static poses with per-frame annotations of joint positions, camera parameters, and rendered images. While extended hand variation, animation features, and different sensor-type simulations are still in progress, our pipeline already provides a flexible foundation for customizable dataset generation and reproducible hand-tracking model training. Steininger, Melissa; Marquardt, Alexander; Perusquía-Hernández, Monica; Lehnort, Marvin; Otsubo, Hiromu; Dollack, Felix; Kruijff, Ernst; Krüger, Björn; Kiyokawa, Kiyoshi; Riecke, Bernhard E.
The Awe-some Spectrum: Self-Reported Awe Varies by Eliciting Scenery and Presence in Virtual Reality, and the User's Nationality Proceedings Article Forthcoming
In: IEEE International Symposium on Mixed and Augmented Reality (ISMAR), Forthcoming.
@inproceedings{steininger2025c,
  title     = {The Awe-some Spectrum: Self-Reported Awe Varies by Eliciting Scenery and Presence in Virtual Reality, and the User's Nationality},
  author    = {Melissa Steininger and Alexander Marquardt and Monica Perusquía-Hernández and Marvin Lehnort and Hiromu Otsubo and Felix Dollack and Ernst Kruijff and Björn Krüger and Kiyoshi Kiyokawa and Bernhard E. Riecke},
  year      = {2025},
  date      = {2025-10-01},
  booktitle = {IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
  abstract  = {Awe is a multifaceted emotion often associated with the perception of vastness, that challenges existing mental frameworks. Despite its growing relevance in affective computing and psychological research, awe remains difficult to elicit and measure.
This raises the research questions of how awe can be effectively elicited, which factors are associated with the experience of awe, and whether it can reliably be measured using biosensors.
For this study, we designed ten immersive Virtual Reality (VR) scenes with dynamic transitions from narrow to vast environments. These scenes were used to explore how awe relates to environmental features (abstract, human-made, nature), personality traits, and country of origin. We collected skin conductance, respiration data, and self-reported awe and presence from participants from Germany, Japan, and Jordan.
Our results indicate that self-reported awe varies significantly across countries and scene types. In particular, a scene depicting outer space elicited the strongest awe. Scenes that elicited high self-reported awe also induced a stronger sense of presence. However, we found no evidence that awe ratings are correlated with physiological responses.
These findings challenge the assumption that awe is reliably reflected in autonomic arousal and underscore the importance of cultural and perceptual context.
Our study offers new insights into how immersive VR can be designed to elicit awe, and suggests that subjective reports—rather than physiological signals—remain the most consistent indicators of emotional impact.},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {inproceedings},
}
Awe is a multifaceted emotion often associated with the perception of vastness, that challenges existing mental frameworks. Despite its growing relevance in affective computing and psychological research, awe remains difficult to elicit and measure.
This raises the research questions of how awe can be effectively elicited, which factors are associated with the experience of awe, and whether it can reliably be measured using biosensors.
For this study, we designed ten immersive Virtual Reality (VR) scenes with dynamic transitions from narrow to vast environments. These scenes were used to explore how awe relates to environmental features (abstract, human-made, nature), personality traits, and country of origin. We collected skin conductance, respiration data, and self-reported awe and presence from participants from Germany, Japan, and Jordan.
Our results indicate that self-reported awe varies significantly across countries and scene types. In particular, a scene depicting outer space elicited the strongest awe. Scenes that elicited high self-reported awe also induced a stronger sense of presence. However, we found no evidence that awe ratings are correlated with physiological responses.
These findings challenge the assumption that awe is reliably reflected in autonomic arousal and underscore the importance of cultural and perceptual context.
Our study offers new insights into how immersive VR can be designed to elicit awe, and suggests that subjective reports—rather than physiological signals—remain the most consistent indicators of emotional impact. Alavi, Khashayar; Jansen, Anna; Steininger, Melissa; Mustafa, Sarah Al-Haj; Müllers, Johannes; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Graph Neural Networks for Analyzing Eye Fixation Patterns in Epilepsy Conference Forthcoming
International Congress on Mobile Health and Digital Technology in Epilepsy, Forthcoming.
@conference{alavi2025a,
  title     = {Graph Neural Networks for Analyzing Eye Fixation Patterns in Epilepsy},
  author    = {Khashayar Alavi and Anna Jansen and Melissa Steininger and Sarah Al-Haj Mustafa and Johannes Müllers and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  year      = {2025},
  date      = {2025-09-04},
  urldate   = {2025-09-04},
  booktitle = {International Congress on Mobile Health and Digital Technology in Epilepsy},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {conference},
}
Jansen, Anna; Steininger, Melissa; Mustafa, Sarah Al-Haj; Müllers, Johannes; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Search Behavior – Metrics for Analysis of Eye Tracking Data Conference Forthcoming
International Congress on Mobile Health and Digital Technology in Epilepsy, Forthcoming.
@conference{jansen2025b,
  title     = {Search Behavior – Metrics for Analysis of Eye Tracking Data},
  author    = {Anna Jansen and Melissa Steininger and Sarah Al-Haj Mustafa and Johannes Müllers and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  year      = {2025},
  date      = {2025-09-04},
  urldate   = {2025-09-04},
  booktitle = {International Congress on Mobile Health and Digital Technology in Epilepsy},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {conference},
}
Pukropski, Jan; Weber, Christian; Müllers, Johannes; Grond, Martin; Surges, Rainer; Krüger, Björn
Implementation of a User-Friendly System in Epileptologic Teleconsultation Conference Forthcoming
International Congress on Mobile Health and Digital Technology in Epilepsy, Forthcoming.
@conference{prukopski2025a,
  title     = {Implementation of a User-Friendly System in Epileptologic Teleconsultation},
  author    = {Jan Pukropski and Christian Weber and Johannes Müllers and Martin Grond and Rainer Surges and Björn Krüger},
  year      = {2025},
  date      = {2025-09-04},
  booktitle = {International Congress on Mobile Health and Digital Technology in Epilepsy},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {conference},
}
Mustafa, Sarah Al-Haj; Jansen, Anna; Steininger, Melissa; Müllers, Johannes; Surges, Rainer; von Wrede, Randi; Krüger, Björn; Helmstaedter, Christoph
Eyes on Cognition: Exploring Oculomotor Correlates of Cognitive Function in Patients with Epilepsy Journal Article
In: Epilepsy & Behavior, vol. 173, iss. December 2025, no. 110562, 2025.
@article{alhaj2025,
  title     = {Eyes on Cognition: Exploring Oculomotor Correlates of Cognitive Function in Patients with Epilepsy},
  author    = {Sarah Al-Haj Mustafa and Anna Jansen and Melissa Steininger and Johannes Müllers and Rainer Surges and Randi von Wrede and Björn Krüger and Christoph Helmstaedter},
  doi       = {10.1016/j.yebeh.2025.110562},
  year      = {2025},
  date      = {2025-06-30},
  urldate   = {2025-06-30},
  journal   = {Epilepsy & Behavior},
  volume    = {173},
  number    = {110562},
  issue     = {December 2025},
  abstract  = {Objective
This study investigates the relationship between eye tracking parameters and cognitive performance during the Trail Making Test (TMT) in individuals with epilepsy and healthy controls. By analyzing ocular behaviors such as saccade velocity, fixation duration, and pupil diameter, we aim to determine how these metrics reflect executive functioning and attentional control.
Methods
A sample of 95 participants with epilepsy and 34 healthy controls completed the TMT while their eye movements were recorded. Partial correlations, controlling for age, sex, education, medication count, seizure status and epilepsy duration, examined associations between eye tracking measures and cognitive performance derived from EpiTrack and TMT performance.
Results
In the patient group, faster TMT-A performance was associated with shorter fixation durations (r = 0.31, p = 0.006). Lower minimum saccade velocity correlated with slower performance on both TMT-A (r = −0.35, p = 0.002) and TMT-B (r = −0.40, p<0.001), whereas higher peak saccade velocities were linked to worse performance (TMT-A: r = 0.45, p<0.001; TMT-B: r = 0.41, p<0.001). Pupil diameter findings indicated that slower TMT performance was associated with smaller minimum pupil sizes (r = −0.23 to r = −0.36), which may indicate increased cognitive effort and attentional load. Higher EpiTrack scores also correlated with a smaller minimum pupil diameter − but only during the more demanding TMT-B − and with a more restricted saccade velocity range, reflecting greater motor control and attentional stability. No significant correlations emerged within the control group.
Conclusion
These findings highlight the potential of eye tracking as a non-invasive tool for assessing cognitive function in epilepsy. Efficient cognitive performance was characterized by stable and controlled eye movements, whereas impaired performance involved erratic saccade dynamics and prolonged fixations. Importantly, eye tracking parameters provide additional information beyond simple speed measurements, potentially enhancing the differential diagnostic capabilities of the TMT in epilepsy. The observed associations between oculomotor parameters and cognitive performance were not present in the control group, suggesting that these relationships may be specific to epilepsy. Future research should investigate whether both basic and advanced metrics of search strategies are sensitive to disease dynamics and treatment effects in epilepsy.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Objective
This study investigates the relationship between eye tracking parameters and cognitive performance during the Trail Making Test (TMT) in individuals with epilepsy and healthy controls. By analyzing ocular behaviors such as saccade velocity, fixation duration, and pupil diameter, we aim to determine how these metrics reflect executive functioning and attentional control.
Methods
A sample of 95 participants with epilepsy and 34 healthy controls completed the TMT while their eye movements were recorded. Partial correlations, controlling for age, sex, education, medication count, seizure status and epilepsy duration, examined associations between eye tracking measures and cognitive performance derived from EpiTrack and TMT performance.
Results
In the patient group, faster TMT-A performance was associated with shorter fixation durations (r = 0.31, p = 0.006). Lower minimum saccade velocity correlated with slower performance on both TMT-A (r = −0.35, p = 0.002) and TMT-B (r = −0.40, p<0.001), whereas higher peak saccade velocities were linked to worse performance (TMT-A: r = 0.45, p<0.001; TMT-B: r = 0.41, p<0.001). Pupil diameter findings indicated that slower TMT performance was associated with smaller minimum pupil sizes (r = −0.23 to r = −0.36), which may indicate increased cognitive effort and attentional load. Higher EpiTrack scores also correlated with a smaller minimum pupil diameter − but only during the more demanding TMT-B − and with a more restricted saccade velocity range, reflecting greater motor control and attentional stability. No significant correlations emerged within the control group.
Conclusion
These findings highlight the potential of eye tracking as a non-invasive tool for assessing cognitive function in epilepsy. Efficient cognitive performance was characterized by stable and controlled eye movements, whereas impaired performance involved erratic saccade dynamics and prolonged fixations. Importantly, eye tracking parameters provide additional information beyond simple speed measurements, potentially enhancing the differential diagnostic capabilities of the TMT in epilepsy. The observed associations between oculomotor parameters and cognitive performance were not present in the control group, suggesting that these relationships may be specific to epilepsy. Future research should investigate whether both basic and advanced metrics of search strategies are sensitive to disease dynamics and treatment effects in epilepsy. Greß, Hannah; Demidova, Elena; Meier, Michael; Krüger, Björn
SecureNeuroAI: Advanced Security Framework for AI-Powered Multimodal Real-Time Detection of Medical Seizure Events Proceedings Article
In: Ohm, Marc (Ed.): Proceedings of the 15th graduate workshop of the special interest group Security - Intrusion Detection and Response (SIDAR) of the German Informatics Society (GI) (SPRING 2025), pp. 22-24, GI SIG SIDAR, Nuremberg, April, 2025, ISSN: 2190-846X.
@inproceedings{Greß2025,
  title     = {SecureNeuroAI: Advanced Security Framework for AI-Powered Multimodal Real-Time Detection of Medical Seizure Events},
  author    = {Hannah Greß and Elena Demidova and Michael Meier and Björn Krüger},
  editor    = {Marc Ohm},
  url       = {https://fg-sidar.gi.de/publikationen/sidar-reports},
  issn      = {2190-846X},
  year      = {2025},
  date      = {2025-05-12},
  urldate   = {2025-05-12},
  booktitle = {Proceedings of the 15th graduate workshop of the special interest group Security - Intrusion Detection and Response (SIDAR) of the German Informatics Society (GI) (SPRING 2025)},
  pages     = {22--24},
  publisher = {GI SIG SIDAR},
  address   = {Nuremberg},
  abstract  = {In today's interconnected world, medical devices are increasingly equipped with novel digital technologies and AI-powered methods to improve the users' quality of life.
Despite the increased possibilities and features these devices offer due to the technical progress, cyberattacks on medical devices will increase as well with possibly severe outcomes for the patients.
At the same time, AI-based technologies could help to detect and mitigate these attacks on medical systems and their data in real-time.
Therefore, our project "SecureNeuroAI" aims to detect epileptic seizures using multimodal sensor data and AI models while also considering possible cyberattacks on this system resulting in an IT-secure system.
Our results will serve as an example for future AI-supported medical devices and systems to enhance their security and to strengthen their trustworthiness towards their (future) users.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
In today's interconnected world, medical devices are increasingly equipped with novel digital technologies and AI-powered methods to improve the users' quality of life.
Despite the increased possibilities and features these devices offer due to the technical progress, cyberattacks on medical devices will increase as well with possibly severe outcomes for the patients.
At the same time, AI-based technologies could help to detect and mitigate these attacks on medical systems and their data in real-time.
Therefore, our project "SecureNeuroAI" aims to detect epileptic seizures using multimodal sensor data and AI models while also considering possible cyberattacks on this system resulting in an IT-secure system.
Our results will serve as an example for future AI-supported medical devices and systems to enhance their security and to strengthen their trustworthiness towards their (future) users. Khan, Umar; Riaz, Qaiser; Hussain, Mehdi; Zeeshan, Muhammad; Krüger, Björn
Towards Effective Parkinson’s Monitoring: Movement Disorder Detection and Symptom Identification Using Wearable Inertial Sensors Journal Article
In: Algorithms, vol. 18, no. 4, 2025, ISSN: 1999-4893.
@article{2025-khan,
  title     = {Towards Effective Parkinson’s Monitoring: Movement Disorder Detection and Symptom Identification Using Wearable Inertial Sensors},
  author    = {Umar Khan and Qaiser Riaz and Mehdi Hussain and Muhammad Zeeshan and Björn Krüger},
  url       = {https://www.mdpi.com/1999-4893/18/4/203},
  doi       = {10.3390/a18040203},
  issn      = {1999-4893},
  year      = {2025},
  date      = {2025-04-04},
  urldate   = {2025-01-01},
  journal   = {Algorithms},
  volume    = {18},
  number    = {4},
  abstract  = {Parkinson’s disease lacks a cure, yet symptomatic relief can be achieved through various treatments. This study dives into the critical aspect of anomalous event detection in the activities of daily living of patients with Parkinson’s disease and the identification of associated movement disorders, such as tremors, dyskinesia, and bradykinesia. Utilizing the inertial data acquired from the most affected upper limb of the patients, this study aims to create an optimal pipeline for Parkinson’s patient monitoring. This study proposes a two-stage movement disorder detection and classification pipeline for binary classification (normal or anomalous event) and multi-label classification (tremors, dyskinesia, and bradykinesia), respectively. The proposed pipeline employs and evaluates manual feature crafting for classical machine learning algorithms, as well as an RNN-CNN-inspired deep learning model that does not require manual feature crafting. This study also explores three different window sizes for signal segmentation and two different auto-segment labeling approaches for precise and correct labeling of the continuous signal. The performance of the proposed model is validated on a publicly available inertial dataset. Comparisons with existing works reveal the novelty of our approach, covering multiple anomalies (tremors, dyskinesia, and bradykinesia) and achieving 93.03% recall for movement disorder detection (binary) and 91.54% recall for movement disorder classification (multi-label). We believe that the proposed approach will advance the field towards more effective and comprehensive solutions for Parkinson’s detection and symptom classification.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Parkinson’s disease lacks a cure, yet symptomatic relief can be achieved through various treatments. This study dives into the critical aspect of anomalous event detection in the activities of daily living of patients with Parkinson’s disease and the identification of associated movement disorders, such as tremors, dyskinesia, and bradykinesia. Utilizing the inertial data acquired from the most affected upper limb of the patients, this study aims to create an optimal pipeline for Parkinson’s patient monitoring. This study proposes a two-stage movement disorder detection and classification pipeline for binary classification (normal or anomalous event) and multi-label classification (tremors, dyskinesia, and bradykinesia), respectively. The proposed pipeline employs and evaluates manual feature crafting for classical machine learning algorithms, as well as an RNN-CNN-inspired deep learning model that does not require manual feature crafting. This study also explore three different window sizes for signal segmentation and two different auto-segment labeling approaches for precise and correct labeling of the continuous signal. The performance of the proposed model is validated on a publicly available inertial dataset. Comparisons with existing works reveal the novelty of our approach, covering multiple anomalies (tremors, dyskinesia, and bradykinesia) and achieving 93.03% recall for movement disorder detection (binary) and 91.54% recall for movement disorder classification (multi-label). We believe that the proposed approach will advance the field towards more effective and comprehensive solutions for Parkinson’s detection and symptom classification. Greß, Hannah; Alouardani, Saied; Hoffmann, Nico; Trebing, Pia; Becker, Albert J.; Surges, Rainer; Pitsch, Julika; Krüger, Björn
Digitale Transformation des Blutprobenmanagements Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{gress2025a,
  title     = {Digitale Transformation des Blutprobenmanagements},
  author    = {Hannah Greß and Saied Alouardani and Nico Hoffmann and Pia Trebing and Albert J. Becker and Rainer Surges and Julika Pitsch and Björn Krüger},
  year      = {2025},
  date      = {2025-03-26},
  urldate   = {2025-03-26},
  booktitle = {Dreiländertagung Epilepsie 2025},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Haaga, Lisa; Jansen, Anna; Steininger, Melissa; Müllers, Johannes; Bausch, Marcel; Jordan, Arthur; Surges, Rainer; Krüger, Björn
EpiEye – Einfluss anfallssupressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{haaga2025a,
  title     = {EpiEye – Einfluss anfallssupressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien},
  author    = {Lisa Haaga and Anna Jansen and Melissa Steininger and Johannes Müllers and Marcel Bausch and Arthur Jordan and Rainer Surges and Björn Krüger},
  year      = {2025},
  date      = {2025-03-26},
  booktitle = {Dreiländertagung Epilepsie 2025},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Müllers, Johannes; Greß, Hannah; Weber, Christian; Nadeem, Mubaris; Hütwohl, Daniela; Pukropski, Jan; Grond, Martin; Surges, Rainer; Krüger, Björn
Aufbau eines epileptologischen Telekonsils zwischen dem Klinikum Siegen und dem Universitätsklinikum Bonn Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{muellers2025a,
  title     = {Aufbau eines epileptologischen Telekonsils zwischen dem Klinikum Siegen und dem Universitätsklinikum Bonn},
  author    = {Johannes Müllers and Hannah Greß and Christian Weber and Mubaris Nadeem and Daniela Hütwohl and Jan Pukropski and Martin Grond and Rainer Surges and Björn Krüger},
  year      = {2025},
  date      = {2025-03-26},
  urldate   = {2025-03-26},
  booktitle = {Dreiländertagung Epilepsie 2025},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Mustafa, Sarah Al-Haj; Jansen, Anna; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
ANNE – Augen-Tracking zur Erkennung von Nebenwirkungen bei Epilepsie Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{nokey,
  title     = {ANNE – Augen-Tracking zur Erkennung von Nebenwirkungen bei Epilepsie},
  author    = {Sarah Al-Haj Mustafa and Anna Jansen and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  year      = {2025},
  date      = {2025-03-26},
  urldate   = {2025-03-26},
  booktitle = {Dreiländertagung Epilepsie 2025},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Neuß, Maike Susanne; Pitsch, Julika; Krüger, Björn; Becker, Albert J.; Surges, Rainer; Baumgartner, Tobias
Semiologische Charakteristika der Temporallappenepilepsie mit Antikörpern gegen GAD65 Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{neuss2025,
  title     = {Semiologische Charakteristika der Temporallappenepilepsie mit Antikörpern gegen GAD65},
  author    = {Maike Susanne Neuß and Julika Pitsch and Björn Krüger and Albert J. Becker and Rainer Surges and Tobias Baumgartner},
  year      = {2025},
  date      = {2025-03-26},
  booktitle = {Dreiländertagung Epilepsie 2025},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Greß, Hannah; Krüger, Björn; Tischhauser, Elmar
The Newer, the More Secure? Standards-Compliant Bluetooth Low Energy Man-in-the-Middle Attacks on Fitness Trackers Journal Article
In: Sensors, vol. 25, no. 6, 2025, ISSN: 1424-8220.
@article{2025gressBT,
  title     = {The Newer, the More Secure? Standards-Compliant Bluetooth Low Energy Man-in-the-Middle Attacks on Fitness Trackers},
  author    = {Hannah Greß and Björn Krüger and Elmar Tischhauser},
  url       = {https://www.mdpi.com/1424-8220/25/6/1815},
  doi       = {10.3390/s25061815},
  issn      = {1424-8220},
  year      = {2025},
  date      = {2025-03-14},
  urldate   = {2025-01-01},
  journal   = {Sensors},
  volume    = {25},
  number    = {6},
  abstract  = {The trend in self-tracking devices has remained unabated for years. Even if they record a large quantity of sensitive data, most users are not concerned about their data being transmitted and stored in a secure way from the device via the companion app to the vendor’s server. However, the secure implementation of this chain from the manufacturer is not always given, as various publications have already shown. Therefore, we first provide an overview of attack vectors within the ecosystem of self-tracking devices. Second, we evaluate the data security of eight contemporary fitness trackers from leading vendors by applying four still partly standards-compliant Bluetooth Low-Energy Man-in-the-Middle (MitM) attacks. Our results show that the examined devices are partially vulnerable against the attacks. For most of the trackers, the manufacturers put different security measures in place. These include short and user-initiated visibility and connectivity or app-level authentication to limit the attack surface. Interestingly, newer models are more likely to be attackable, underlining the constant need for verifying the security of BLE devices, reporting found vulnerabilities, and also strengthening standards and improving security awareness among manufacturers and users. Therefore, we finish our work with recommendations and best practices for law- and regulation-makers, vendors, and users on how to strengthen the security of BLE devices.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
The trend in self-tracking devices has remained unabated for years. Even if they record a large quantity of sensitive data, most users are not concerned about their data being transmitted and stored in a secure way from the device via the companion app to the vendor’s server. However, the secure implementation of this chain from the manufacturer is not always given, as various publications have already shown. Therefore, we first provide an overview of attack vectors within the ecosystem of self-tracking devices. Second, we evaluate the data security of eight contemporary fitness trackers from leading vendors by applying four still partly standards-compliant Bluetooth Low-Energy Man-in-the-Middle (MitM) attacks. Our results show that the examined devices are partially vulnerable against the attacks. For most of the trackers, the manufacturers put different security measures in place. These include short and user-initiated visibility and connectivity or app-level authentication to limit the attack surface. Interestingly, newer models are more likely to be attackable, underlining the constant need for verifying the security of BLE devices, reporting found vulnerabilities, and also strengthening standards and improving security awareness among manufacturers and users. Therefore, we finish our work with recommendations and best practices for law- and regulation-makers, vendors, and users on how to strengthen the security of BLE devices. Steininger, Melissa; Jansen, Anna; Welle, Kristian; Krüger, Björn
Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR Conference
2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR), 2025.
@conference{steininger2025b,
  title     = {Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR},
  author    = {Melissa Steininger and Anna Jansen and Kristian Welle and Björn Krüger},
  year      = {2025},
  date      = {2025-03-12},
  urldate   = {2025-03-12},
  booktitle = {2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR)},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Müllers, Johannes; Staehle, Ricarda; Stroth, Sanna; Poustka, Luise; Krüger, Björn; Schulte-Rüther, Martin
Multi-Stream Analysis for Robust Head Gesture Classification in Natural Social Interaction: Leveraging High-Resolution Sensor Data for Optimized Visual Classification Conference
16. Wissenschaftlichen Tagung Autismus-Spektrum (WTAS), 2025.
@conference{muellers2025b,
  title     = {Multi-Stream Analysis for Robust Head Gesture Classification in Natural Social Interaction: Leveraging High-Resolution Sensor Data for Optimized Visual Classification},
  author    = {Johannes Müllers and Ricarda Staehle and Sanna Stroth and Luise Poustka and Björn Krüger and Martin Schulte-Rüther},
  year      = {2025},
  date      = {2025-03-07},
  booktitle = {16. Wissenschaftlichen Tagung Autismus-Spektrum (WTAS)},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Steininger, Melissa; Jansen, Anna; Mustafa, Sarah Al-Haj; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Eye-Tracking Reveals Search Behaviour in Epilepsy Patients Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{steininger2025a,
  title     = {Eye-Tracking Reveals Search Behaviour in Epilepsy Patients},
  author    = {Melissa Steininger and Anna Jansen and Sarah Al-Haj Mustafa and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  year      = {2025},
  date      = {2025-03-03},
  urldate   = {2025-03-03},
  booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Jansen, Anna; Steininger, Melissa; Mustafa, Sarah Al-Haj; Müllers, Johannes; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Prediction Models on Eye Tracking Data in Epilepsy Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{jansen2025a,
  title     = {Prediction Models on Eye Tracking Data in Epilepsy},
  author    = {Anna Jansen and Melissa Steininger and Sarah Al-Haj Mustafa and Johannes Müllers and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
  year      = {2025},
  date      = {2025-03-03},
  urldate   = {2025-03-03},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
2024
Otsubo, Hiromu; Lehnort, Marvin; Steininger, Melissa; Marquardt, Alexander; Dollack, Felix; Hirao, Yutaro; Perusquía-Hernández, Monica; Uchiyama, Hideaki; Kruijff, Ernst; Riecke, Bernhard; Kiyokawa, Kiyoshi
First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality Proceedings Article
In: ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction, pp. 439 - 448, 2024.
@inproceedings{Otsubo2024,
  title     = {First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality},
  author    = {Hiromu Otsubo and Marvin Lehnort and Melissa Steininger and Alexander Marquardt and Felix Dollack and Yutaro Hirao and Monica Perusquía-Hernández and Hideaki Uchiyama and Ernst Kruijff and Bernhard Riecke and Kiyoshi Kiyokawa},
  doi       = {10.1145/3678957.3685753},
  year      = {2024},
  date      = {2024-11-04},
  urldate   = {2024-11-04},
  booktitle = {ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction},
  pages     = {439--448},
  abstract  = {Awe is a complex emotion described as a perception of vastness and a need for accommodation to integrate new, overwhelming experiences. Virtual Reality (VR) has recently gained attention as a convenient means to facilitate experiences of awe. In VR, a first-person perspective might increase awe due to its immersive nature, while a third-person perspective might enhance the perception of vastness. However, the impact of VR perspectives on experiencing awe has not been thoroughly examined. We created two types of VR scenes: one with elements designed to induce high awe, such as a snowy mountain, and a low awe scene without such elements. We compared first-person and third-person perspectives in each scene. Forty-two participants explored the VR scenes, with their physiological responses captured by electrocardiogram (ECG) and face tracking (FT). Subsequently, participants self-reported their experience of awe (AWE-S) and presence (IPQ) within VR. The results revealed that the first-person perspective induced stronger feelings of awe and presence than the third-person perspective. The findings of this study provide useful guidelines for designing VR content that enhances emotional experiences.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Awe is a complex emotion described as a perception of vastness and a need for accommodation to integrate new, overwhelming experiences. Virtual Reality (VR) has recently gained attention as a convenient means to facilitate experiences of awe. In VR, a first-person perspective might increase awe due to its immersive nature, while a third-person perspective might enhance the perception of vastness. However, the impact of VR perspectives on experiencing awe has not been thoroughly examined. We created two types of VR scenes: one with elements designed to induce high awe, such as a snowy mountain, and a low awe scene without such elements. We compared first-person and third-person perspectives in each scene. Forty-two participants explored the VR scenes, with their physiological responses captured by electrocardiogram (ECG) and face tracking (FT). Subsequently, participants self-reported their experience of awe (AWE-S) and presence (IPQ) within VR. The results revealed that the first-person perspective induced stronger feelings of awe and presence than the third-person perspective. The findings of this study provide useful guidelines for designing VR content that enhances emotional experiences. Marquardt, Alexander; Lehnort, Marvin; Otsubo, Hiromu; Perusquia-Hernandez, Monica; Steininger, Melissa; Dollack, Felix; Uchiyama, Hideaki; Kiyokawa, Kiyoshi; Kruijff, Ernst
Exploring Gesture Interaction in Underwater Virtual Reality Proceedings Article
In: Proceedings of the 2024 ACM Symposium on Spatial User Interaction, pp. 1-2, 2024.
@inproceedings{marquardtLehnort2024,
  title     = {Exploring Gesture Interaction in Underwater Virtual Reality},
  author    = {Alexander Marquardt and Marvin Lehnort and Hiromu Otsubo and Monica Perusquia-Hernandez and Melissa Steininger and Felix Dollack and Hideaki Uchiyama and Kiyoshi Kiyokawa and Ernst Kruijff},
  doi       = {10.1145/3677386.3688890},
  year      = {2024},
  date      = {2024-10-07},
  urldate   = {2024-10-07},
  booktitle = {Proceedings of the 2024 ACM Symposium on Spatial User Interaction},
  pages     = {1--2},
  abstract  = {An underwater virtual reality (UVR) system with gesture-based controls was developed to facilitate navigation and interaction while submerged. The system uses a waterproof head-mounted display and camera-based gesture recognition, originally trained for abovewater conditions, employing three gestures: grab for navigation, pinch for single interactions, and point for continuous interactions. In an experimental study, we tested gesture recognition both above and underwater, and evaluated participant interaction within an immersive underwater scene. Results showed that underwater conditions slightly affected gesture accuracy, but the system maintained high performance. Participants reported a strong sense of presence and found the gestures intuitive while highlighting the need for further refinement to address usability challenges.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
An underwater virtual reality (UVR) system with gesture-based controls was developed to facilitate navigation and interaction while submerged. The system uses a waterproof head-mounted display and camera-based gesture recognition, originally trained for abovewater conditions, employing three gestures: grab for navigation, pinch for single interactions, and point for continuous interactions. In an experimental study, we tested gesture recognition both above and underwater, and evaluated participant interaction within an immersive underwater scene. Results showed that underwater conditions slightly affected gesture accuracy, but the system maintained high performance. Participants reported a strong sense of presence and found the gestures intuitive while highlighting the need for further refinement to address usability challenges. Müllers, Johannes; Krüger, Björn; Schulte-Rüther, Martin
Gesten und Körperbewegungen entschlüsseln - Automatisierte Analyse mit Hilfe von Sensordaten und Methoden des maschinellen Lernens Conference
XXXVIII. DGKJP Kongress 2024, 2024.
@conference{nokey,
  title     = {Gesten und Körperbewegungen entschlüsseln - Automatisierte Analyse mit Hilfe von Sensordaten und Methoden des maschinellen Lernens},
  author    = {Johannes Müllers and Björn Krüger and Martin Schulte-Rüther},
  year      = {2024},
  date      = {2024-09-19},
  booktitle = {XXXVIII. DGKJP Kongress 2024},
  abstract  = {Hintergrund
Subtile Kopfbewegungen wie Nicken, Kopfschütteln und Kopfneigen sind bedeutende nonverbale Kommunikationssignale während einer therapeutischen Sitzung. Diese Bewegungen können leicht übersehen werden, selbst bei detaillierter Videoanalyse. Die Nutzung von Sensordaten, insbesondere die Erfassung von Beschleunigung und Drehrate mittels Accelerometer und Gyroskop, ermöglicht eine präzise Echtzeitanalyse der Kopfbewegungen.
Methode
Zur Analyse der Sensordaten werden verschiedene Ansätze in Betracht gezogen. Klassische analytische Methoden erlauben eine direkte Auswertung der Beschleunigungs- und Rotationsdaten durch festgelegte Schwellenwerte und Mustererkennung. Graphbasierte Ansätze bieten eine flexible Struktur zur Modellierung der Bewegungssequenzen und deren Beziehungen. Zudem können Methoden des maschinellen Lernens, insbesondere überwachte und unüberwachte Lernverfahren, genutzt werden, um komplexe Bewegungsmuster zu identifizieren und zu klassifizieren. In diesem Vortrag werden die Vor- und Nachteile dieser Ansätze diskutiert und verglichen. Ein besonderer Fokus liegt auf der Echtzeitfähigkeit der Methoden, um eine laufende Annotation der Videos zu gewährleisten.
Ergebnisse
Vorläufige Resultate zeigen die Machbarkeit der Sensordatenanalyse. Erste Untersuchungen belegen, dass die erfassten Daten eine detaillierte Erkennung und Differenzierung von Kopfbewegungen ermöglichen.
Diskussion und Schlussfolgerung
Die bisherigen Ergebnisse zeigen, dass Sensordaten und fortschrittliche Analysemethoden das Potenzial haben, die Erkennung und Annotation von Kopfbewegungen erheblich zu verbessern. Analytische Methoden bieten einfache Implementierungsmöglichkeiten, könnten jedoch gegenüber maschinellen Lernmethoden an Anpassungsfähigkeit verlieren. Maschinelles Lernen bietet höhere Genauigkeit, erfordert jedoch umfangreiche Daten und Trainingszeit. Zukünftige Arbeiten werden sich auf die Verfeinerung und Validierung der Methoden konzentrieren, um eine optimale Balance zwischen Genauigkeit, Echtzeitfähigkeit und praktischer Anwendbarkeit zu finden. Insgesamt zeigt sich, dass die Integration von Sensordatenanalyse in therapeutische Sitzungen die Kommunikation und das Verständnis zwischen Therapeut und Patient verbessern kann.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
  internal-note = {citation key "nokey" is an auto-generated placeholder; rename (e.g. muellers2024a) once all citations of this key are updated}
}
Hintergrund
Subtile Kopfbewegungen wie Nicken, Kopfschütteln und Kopfneigen sind bedeutende nonverbale Kommunikationssignale während einer therapeutischen Sitzung. Diese Bewegungen können leicht übersehen werden, selbst bei detaillierter Videoanalyse. Die Nutzung von Sensordaten, insbesondere die Erfassung von Beschleunigung und Drehrate mittels Accelerometer und Gyroskop, ermöglicht eine präzise Echtzeitanalyse der Kopfbewegungen.
Methode
Zur Analyse der Sensordaten werden verschiedene Ansätze in Betracht gezogen. Klassische analytische Methoden erlauben eine direkte Auswertung der Beschleunigungs- und Rotationsdaten durch festgelegte Schwellenwerte und Mustererkennung. Graphbasierte Ansätze bieten eine flexible Struktur zur Modellierung der Bewegungssequenzen und deren Beziehungen. Zudem können Methoden des maschinellen Lernens, insbesondere überwachte und unüberwachte Lernverfahren, genutzt werden, um komplexe Bewegungsmuster zu identifizieren und zu klassifizieren. In diesem Vortrag werden die Vor- und Nachteile dieser Ansätze diskutiert und verglichen. Ein besonderer Fokus liegt auf der Echtzeitfähigkeit der Methoden, um eine laufende Annotation der Videos zu gewährleisten.
Ergebnisse
Vorläufige Resultate zeigen die Machbarkeit der Sensordatenanalyse. Erste Untersuchungen belegen, dass die erfassten Daten eine detaillierte Erkennung und Differenzierung von Kopfbewegungen ermöglichen.
Diskussion und Schlussfolgerung
Die bisherigen Ergebnisse zeigen, dass Sensordaten und fortschrittliche Analysemethoden das Potenzial haben, die Erkennung und Annotation von Kopfbewegungen erheblich zu verbessern. Analytische Methoden bieten einfache Implementierungsmöglichkeiten, könnten jedoch gegenüber maschinellen Lernmethoden an Anpassungsfähigkeit verlieren. Maschinelles Lernen bietet höhere Genauigkeit, erfordert jedoch umfangreiche Daten und Trainingszeit. Zukünftige Arbeiten werden sich auf die Verfeinerung und Validierung der Methoden konzentrieren, um eine optimale Balance zwischen Genauigkeit, Echtzeitfähigkeit und praktischer Anwendbarkeit zu finden. Insgesamt zeigt sich, dass die Integration von Sensordatenanalyse in therapeutische Sitzungen die Kommunikation und das Verständnis zwischen Therapeut und Patient verbessern kann. Steininger, Melissa; Perusquía-Hernández, Monica; Marquardt, Alexander; Otsubo, Hiromu; Lehnort, Marvin; Dollack, Felix; Kiyokawa, Kiyoshi; Kruijff, Ernst; Riecke, Bernhard
Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality Conference
12th International Conference on Affective Computing and Intelligent Interaction, 2024.
@conference{steininger2024,
  title     = {Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality},
  author    = {Melissa Steininger and Monica Perusquía-Hernández and Alexander Marquardt and Hiromu Otsubo and Marvin Lehnort and Felix Dollack and Kiyoshi Kiyokawa and Ernst Kruijff and Bernhard Riecke},
  year      = {2024},
  date      = {2024-09-17},
  urldate   = {2024-09-17},
  booktitle = {12th International Conference on Affective Computing and Intelligent Interaction},
  abstract  = {Awe is an emotion characterized by the perception of vastness and the need to accommodate this vastness into one’s mental framework. We propose an elicitation scene to induce awe in Virtual Reality (VR), validate it through self-report, and explore the feasibility of using skin conductance to predict self-reported awe and vastness as labeled from the stimuli in VR. Sixty-two participants took part in the study comparing the awe-eliciting space scene and a neutral scene. The space scene was confirmed as more awe-eliciting. A k-nearest neighbor algorithm confirmed high and low-awe score clusters used to label the data. A Random Forest algorithm achieved 65% accuracy (F1 = 0.56, AUC = 0.73) when predicting the self-reported low and high awe categories from continuous skin conductance data. A similar approach achieved 55% accuracy (F1 = 0.59, AUC = 0.56) when predicting the perception of vastness. These results underscore the potential of skin-conductance-based algorithms to predict awe.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Awe is an emotion characterized by the perception of vastness and the need to accommodate this vastness into one’s mental framework. We propose an elicitation scene to induce awe in Virtual Reality (VR), validate it through selfreport, and explore the feasibility of using skin conductance to predict self-reported awe and vastness as labeled from the stimuli in VR. Sixty-two participants took part in the study comparing the awe-eliciting space scene and a neutral scene. The space scene was confirmed as more awe-eliciting. A k-nearest neighbor algorithm confirmed high and low-awe score clusters used to label the data. A Random Forest algorithm achieved 65% accuracy (F1= 0.56, AU C= 0.73) when predicting the self-reported low and high awe categories from continuous skin conductance data. A similar approach achieved 55% accuracy (F1= 0.59, AU C= 0.56) when predicting the perception of vastness. These results underscore the potential of skinconductance-based algorithms to predict awe. Kiran, Samia; Riaz, Qaiser; Hussain, Mehdi; Zeeshan, Muhammad; Krüger, Björn
Unveiling Fall Origins: Leveraging Wearable Sensors to Detect Pre-Impact Fall Causes Journal Article
In: IEEE Sensors Journal, vol. 24, no. 15, pp. 24086-24095, 2024, ISSN: 1558-1748.
@article{10552639,
  title    = {Unveiling Fall Origins: Leveraging Wearable Sensors to Detect Pre-Impact Fall Causes},
  author   = {Samia Kiran and Qaiser Riaz and Mehdi Hussain and Muhammad Zeeshan and Björn Krüger},
  doi      = {10.1109/JSEN.2024.3407835},
  issn     = {1558-1748},
  year     = {2024},
  date     = {2024-08-01},
  urldate  = {2024-01-01},
  journal  = {IEEE Sensors Journal},
  volume   = {24},
  number   = {15},
  pages    = {24086--24095},
  abstract = {Falling poses a significant challenge to the health and well-being of the elderly and people with various disabilities. Precise and prompt fall detection plays a crucial role in preventing falls and mitigating the impact of injuries. In this research, we propose a deep classifier for pre-impact fall detection which can detect a fall in the pre-impact phase with an inference time of 46–52 milliseconds. The proposed classifier is an ensemble of Convolutional Neural Networks (CNNs) and Bidirectional Gated Recurrent Units (BiGRU) with residual connections. We validated the performance of the proposed classifier on a comprehensive, publicly available preimpact fall dataset. The dataset covers 36 diverse activities, including 15 types of fall-related activities and 21 types of activities of daily living (ADLs). Furthermore, we evaluated the proposed model using three different inputs of varying dimensions: 6D input (comprising 3D accelerations and 3D angular velocities), 3D input (3D accelerations), and 1D input (magnitude of 3D accelerations). The reduction in the input space from 6D to 1D is aimed at minimizing the computation cost. We have attained commendable results outperforming the state-of-the-art approaches by achieving an average accuracy and F1 score of 98% for 6D input size. The potential implications of this research are particularly relevant in the realm of smart healthcare, with a focus on the elderly and differently-abled population.},
  keywords = {},
  pubstate = {published},
  tppubtype = {article}
}
Falling poses a significant challenge to the health and well-being of the elderly and people with various disabilities. Precise and prompt fall detection plays a crucial role in preventing falls and mitigating the impact of injuries. In this research, we propose a deep classifier for pre-impact fall detection which can detect a fall in the pre-impact phase with an inference time of 46–52 milliseconds. The proposed classifier is an ensemble of Convolutional Neural Networks (CNNs) and Bidirectional Gated Recurrent Units (BiGRU) with residual connections. We validated the performance of the proposed classifier on a comprehensive, publicly available preimpact fall dataset. The dataset covers 36 diverse activities, including 15 types of fall-related activities and 21 types of activities of daily living (ADLs). Furthermore, we evaluated the proposed model using three different inputs of varying dimensions: 6D input (comprising 3D accelerations and 3D angular velocities), 3D input (3D accelerations), and 1D input (magnitude of 3D accelerations). The reduction in the input space from 6D to 1D is aimed at minimizing the computation cost. We have attained commendable results outperforming the state-of-the-art approaches by achieving an average accuracy and F1 score of 98% for 6D input size. The potential implications of this research are particularly relevant in the realm of smart healthcare, with a focus on the elderly and differently-abled population. Krüger, Björn; Weber, Christian; Müllers, Johannes; Greß, Hannah; Beyer, Franziska; Knaub, Jessica; Pukropski, Jan; Hütwohl, Daniela; Hahn, Kai; Grond, Martin; Jonas, Stephan; Surges, Rainer
Teleconsultation to Improve Epilepsy Diagnosis and Therapy Book Chapter
In: Herrmann, Wolfram J.; Leser, Ulf; Möller, Sebastian; Voigt-Antons, Jan-Niklas; Gellert, Paul (Ed.): pp. 18-23, Future-Proofing Healthcare for Older Adults Through Digitalization, 2024.
@inbook{krueger2024a,
  title     = {Teleconsultation to Improve Epilepsy Diagnosis and Therapy},
  author    = {Björn Krüger and Christian Weber and Johannes Müllers and Hannah Greß and Franziska Beyer and Jessica Knaub and Jan Pukropski and Daniela Hütwohl and Kai Hahn and Martin Grond and Stephan Jonas and Rainer Surges},
  editor    = {Wolfram J. Herrmann and Ulf Leser and Sebastian Möller and Jan-Niklas Voigt-Antons and Paul Gellert},
  doi       = {10.14279/depositonce-20417},
  year      = {2024},
  date      = {2024-08-01},
  urldate   = {2024-08-01},
  pages     = {18--23},
  booktitle = {Future-Proofing Healthcare for Older Adults Through Digitalization},
  abstract  = {Teleconsultation in epileptology significantly enhances patient diagnosis and treatment, often eliminating the necessity for physical referral to a specialized clinic. In this paper, we detail the typical teleconsultation process, exploring its technical requirements and legal boundaries. Notably, we focus on the groundwork for establishing a teleconsultation specifically between the University Hospital Bonn and the Klinikum Siegen. Additionally, we provide an overview of currently implemented teleconsultations in epileptology in Germany, concluding with research questions stemming from these advancements.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inbook}
}
Teleconsultation in epileptology significantly enhances patient diagnosis and treatment, often eliminating the necessity for physical referral to a specialized clinic. In this paper, we detail the typical teleconsultation process, exploring its technical requirements and legal boundaries. Notably, we focus on the groundwork for establishing a teleconsultation specifically between the University Hospital Bonn and the Klinikum Siegen. Additionally, we provide an overview of currently implemented teleconsultations in epileptology in Germany, concluding with research questions stemming from these advancements. Riedlinger, Dorothee; Krüger, Björn; Winkler, Hanna; Deutschbein, Johannes; Fischer-Rosinský, Antje; Slagman, Anna; Möckel, Martin
In: Herrmann, Wolfram J.; Leser, Ulf; Möller, Sebastian; Voigt-Antons, Jan-Niklas; Gellert, Paul (Ed.): pp. 108-113, Future-Proofing Healthcare for Older Adults Through Digitalization, 2024.
@inbook{riedlinger2024,
  title     = {Development of an early warning system to identify patients at risk of falling – Combining the analysis of medication prescription data and movement profiles},
  author    = {Dorothee Riedlinger and Björn Krüger and Hanna Winkler and Johannes Deutschbein and Antje Fischer-Rosinský and Anna Slagman and Martin Möckel},
  editor    = {Wolfram J. Herrmann and Ulf Leser and Sebastian Möller and Jan-Niklas Voigt-Antons and Paul Gellert},
  doi       = {10.14279/depositonce-20431},
  year      = {2024},
  date      = {2024-08-01},
  urldate   = {2024-08-01},
  pages     = {108--113},
  booktitle = {Future-Proofing Healthcare for Older Adults Through Digitalization},
  abstract  = {Fall related injuries are a common cause for a reduction of autonomy and quality of life in older patients. The early detection of patients at risk of falling or the prediction of falls may help to prevent falls and thereby improve the health of people of advanced age. Prior analyses of routine medication data pointed to an increase of pain medication prescription prior to an ED},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inbook},
  internal-note = {abstract appears truncated mid-sentence ("... prior to an ED") — restore full text from the source}
}
Fall related injuries are a common cause for a reduction of autonomy and quality of life in older patients. The early detection of patients at risk of falling or the prediction of falls may help to prevent falls and thereby improve the health of people of advanced age. Prior analyses of routine medication data pointed to an increase of pain medication prescription prior to an ED
2025
Jansen, Anna; Morev, Nikita; Steininger, Melissa; Müllers, Johannes; Krüger, Björn
Synthetic Hand Dataset Generation: Multi-View Rendering and Annotation with Blender Conference Forthcoming
Proceedings IEEE International Symposium on Mixed and Augmented Reality (ISMAR), Forthcoming.
@conference{jansen2025c,
  title     = {Synthetic Hand Dataset Generation: Multi-View Rendering and Annotation with Blender},
  author    = {Anna Jansen and Nikita Morev and Melissa Steininger and Johannes Müllers and Björn Krüger},
  year      = {2025},
  date      = {2025-10-06},
  booktitle = {Proceedings IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
  abstract  = {Pose estimation is a common method for precise handtracking, which is important for natural interaction in virtual reality (VR). However, training those models requires large-scale datasets with accurate 3D annotations. Those are difficult to obtain due to the time-consuming data collection and the limited variety in captured scenarios. We present a work-in-progress Blender-based pipeline for generating synthetic multi-view hand datasets. Our system simulates Ultraleap Stereo IR 170-style images and extracts joint positions directly from a rigged hand model, eliminating the need for manual labeling or external tracking processes. The current pipeline version supports randomized static poses with per-frame annotations of joint positions, camera parameters, and rendered images. While extended hand variation, animation features, and different sensor-type simulations are still in progress, our pipeline already provides a flexible foundation for customizable dataset generation and reproducible hand-tracking model training.},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {conference},
  internal-note = {duplicate: an identical entry with key "jansen2025c" appears earlier in this file; remove one copy to avoid duplicate-key warnings}
}
Steininger, Melissa; Marquardt, Alexander; Perusquía-Hernández, Monica; Lehnort, Marvin; Otsubo, Hiromu; Dollack, Felix; Kruijff, Ernst; Krüger, Björn; Kiyokawa, Kiyoshi; Riecke, Bernhard E.
The Awe-some Spectrum: Self-Reported Awe Varies by Eliciting Scenery and Presence in Virtual Reality, and the User's Nationality Proceedings Article Forthcoming
In: IEEE International Symposium on Mixed and Augmented Reality (ISMAR), Forthcoming.
@inproceedings{steininger2025c,
  title     = {The Awe-some Spectrum: Self-Reported Awe Varies by Eliciting Scenery and Presence in Virtual Reality, and the User's Nationality},
  author    = {Melissa Steininger and Alexander Marquardt and Monica Perusquía-Hernández and Marvin Lehnort and Hiromu Otsubo and Felix Dollack and Ernst Kruijff and Björn Krüger and Kiyoshi Kiyokawa and Bernhard E. Riecke},
  year      = {2025},
  date      = {2025-10-01},
  booktitle = {IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
  abstract  = {Awe is a multifaceted emotion often associated with the perception of vastness, that challenges existing mental frameworks. Despite its growing relevance in affective computing and psychological research, awe remains difficult to elicit and measure.
This raises the research questions of how awe can be effectively elicited, which factors are associated with the experience of awe, and whether it can reliably be measured using biosensors.
For this study, we designed ten immersive Virtual Reality (VR) scenes with dynamic transitions from narrow to vast environments. These scenes were used to explore how awe relates to environmental features (abstract, human-made, nature), personality traits, and country of origin. We collected skin conductance, respiration data, and self-reported awe and presence from participants from Germany, Japan, and Jordan.
Our results indicate that self-reported awe varies significantly across countries and scene types. In particular, a scene depicting outer space elicited the strongest awe. Scenes that elicited high self-reported awe also induced a stronger sense of presence. However, we found no evidence that awe ratings are correlated with physiological responses.
These findings challenge the assumption that awe is reliably reflected in autonomic arousal and underscore the importance of cultural and perceptual context.
Our study offers new insights into how immersive VR can be designed to elicit awe, and suggests that subjective reports—rather than physiological signals—remain the most consistent indicators of emotional impact.},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {inproceedings}
}
This raises the research questions of how awe can be effectively elicited, which factors are associated with the experience of awe, and whether it can reliably be measured using biosensors.
For this study, we designed ten immersive Virtual Reality (VR) scenes with dynamic transitions from narrow to vast environments. These scenes were used to explore how awe relates to environmental features (abstract, human-made, nature), personality traits, and country of origin. We collected skin conductance, respiration data, and self-reported awe and presence from participants from Germany, Japan, and Jordan.
Our results indicate that self-reported awe varies significantly across countries and scene types. In particular, a scene depicting outer space elicited the strongest awe. Scenes that elicited high self-reported awe also induced a stronger sense of presence. However, we found no evidence that awe ratings are correlated with physiological responses.
These findings challenge the assumption that awe is reliably reflected in autonomic arousal and underscore the importance of cultural and perceptual context.
Our study offers new insights into how immersive VR can be designed to elicit awe, and suggests that subjective reports—rather than physiological signals—remain the most consistent indicators of emotional impact.
Alavi, Khashayar; Jansen, Anna; Steininger, Melissa; Mustafa, Sarah Al-Haj; Müllers, Johannes; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Graph Neural Networks for Analyzing Eye Fixation Patterns in Epilepsy Conference Forthcoming
International Congress on Mobile Health and Digital Technology in Epilepsy, Forthcoming.
@conference{alavi2025a,
  title     = {Graph Neural Networks for Analyzing Eye Fixation Patterns in Epilepsy},
  author    = {Khashayar Alavi and Anna Jansen and Melissa Steininger and Sarah Al-Haj Mustafa and Johannes Müllers and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  booktitle = {International Congress on Mobile Health and Digital Technology in Epilepsy},
  year      = {2025},
  date      = {2025-09-04},
  urldate   = {2025-09-04},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {conference}
}
Jansen, Anna; Steininger, Melissa; Mustafa, Sarah Al-Haj; Müllers, Johannes; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Search Behavior – Metrics for Analysis of Eye Tracking Data Conference Forthcoming
International Congress on Mobile Health and Digital Technology in Epilepsy, Forthcoming.
@conference{jansen2025b,
  title     = {Search Behavior – Metrics for Analysis of Eye Tracking Data},
  author    = {Anna Jansen and Melissa Steininger and Sarah Al-Haj Mustafa and Johannes Müllers and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  year      = {2025},
  date      = {2025-09-04},
  urldate   = {2025-09-04},
  booktitle = {International Congress on Mobile Health and Digital Technology in Epilepsy},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {conference}
}
Pukropski, Jan; Weber, Christian; Müllers, Johannes; Grond, Martin; Surges, Rainer; Krüger, Björn
Implementation of a User-Friendly System in Epileptologic Teleconsultation Conference Forthcoming
International Congress on Mobile Health and Digital Technology in Epilepsy, Forthcoming.
@conference{prukopski2025a,
  title     = {Implementation of a User-Friendly System in Epileptologic Teleconsultation},
  author    = {Jan Pukropski and Christian Weber and Johannes Müllers and Martin Grond and Rainer Surges and Björn Krüger},
  year      = {2025},
  date      = {2025-09-04},
  booktitle = {International Congress on Mobile Health and Digital Technology in Epilepsy},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {conference},
  internal-note = {key "prukopski2025a" misspells first author's surname (Pukropski); rename to pukropski2025a once all citations are updated}
}
Mustafa, Sarah Al-Haj; Jansen, Anna; Steininger, Melissa; Müllers, Johannes; Surges, Rainer; von Wrede, Randi; Krüger, Björn; Helmstaedter, Christoph
Eyes on Cognition: Exploring Oculomotor Correlates of Cognitive Function in Patients with Epilepsy Journal Article
In: Epilepsy & Behavior, vol. 173, iss. December 2025, no. 110562, 2025.
@article{alhaj2025,
title = {Eyes on Cognition: Exploring Oculomotor Correlates of Cognitive Function in Patients with Epilepsy},
author = {Sarah Al-Haj Mustafa and Anna Jansen and Melissa Steininger and Johannes Müllers and Rainer Surges and Randi von Wrede and Björn Krüger and Christoph Helmstaedter},
doi = {10.1016/j.yebeh.2025.110562},
year = {2025},
date = {2025-06-30},
urldate = {2025-06-30},
journal = {Epilepsy \& Behavior},
volume = {173},
number = {110562},
issue = {December 2025},
abstract = {Objective
This study investigates the relationship between eye tracking parameters and cognitive performance during the Trail Making Test (TMT) in individuals with epilepsy and healthy controls. By analyzing ocular behaviors such as saccade velocity, fixation duration, and pupil diameter, we aim to determine how these metrics reflect executive functioning and attentional control.
Methods
A sample of 95 participants with epilepsy and 34 healthy controls completed the TMT while their eye movements were recorded. Partial correlations, controlling for age, sex, education, medication count, seizure status and epilepsy duration, examined associations between eye tracking measures and cognitive performance derived from EpiTrack and TMT performance.
Results
In the patient group, faster TMT-A performance was associated with shorter fixation durations (r = 0.31, p = 0.006). Lower minimum saccade velocity correlated with slower performance on both TMT-A (r = −0.35, p = 0.002) and TMT-B (r = −0.40, p<0.001), whereas higher peak saccade velocities were linked to worse performance (TMT-A: r = 0.45, p<0.001; TMT-B: r = 0.41, p<0.001). Pupil diameter findings indicated that slower TMT performance was associated with smaller minimum pupil sizes (r = −0.23 to r = −0.36), which may indicate increased cognitive effort and attentional load. Higher EpiTrack scores also correlated with a smaller minimum pupil diameter − but only during the more demanding TMT-B − and with a more restricted saccade velocity range, reflecting greater motor control and attentional stability. No significant correlations emerged within the control group.
Conclusion
These findings highlight the potential of eye tracking as a non-invasive tool for assessing cognitive function in epilepsy. Efficient cognitive performance was characterized by stable and controlled eye movements, whereas impaired performance involved erratic saccade dynamics and prolonged fixations. Importantly, eye tracking parameters provide additional information beyond simple speed measurements, potentially enhancing the differential diagnostic capabilities of the TMT in epilepsy. The observed associations between oculomotor parameters and cognitive performance were not present in the control group, suggesting that these relationships may be specific to epilepsy. Future research should investigate whether both basic and advanced metrics of search strategies are sensitive to disease dynamics and treatment effects in epilepsy.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
This study investigates the relationship between eye tracking parameters and cognitive performance during the Trail Making Test (TMT) in individuals with epilepsy and healthy controls. By analyzing ocular behaviors such as saccade velocity, fixation duration, and pupil diameter, we aim to determine how these metrics reflect executive functioning and attentional control.
Methods
A sample of 95 participants with epilepsy and 34 healthy controls completed the TMT while their eye movements were recorded. Partial correlations, controlling for age, sex, education, medication count, seizure status and epilepsy duration, examined associations between eye tracking measures and cognitive performance derived from EpiTrack and TMT performance.
Results
In the patient group, faster TMT-A performance was associated with shorter fixation durations (r = 0.31, p = 0.006). Lower minimum saccade velocity correlated with slower performance on both TMT-A (r = −0.35, p = 0.002) and TMT-B (r = −0.40, p<0.001), whereas higher peak saccade velocities were linked to worse performance (TMT-A: r = 0.45, p<0.001; TMT-B: r = 0.41, p<0.001). Pupil diameter findings indicated that slower TMT performance was associated with smaller minimum pupil sizes (r = −0.23 to r = −0.36), which may indicate increased cognitive effort and attentional load. Higher EpiTrack scores also correlated with a smaller minimum pupil diameter − but only during the more demanding TMT-B − and with a more restricted saccade velocity range, reflecting greater motor control and attentional stability. No significant correlations emerged within the control group.
Conclusion
These findings highlight the potential of eye tracking as a non-invasive tool for assessing cognitive function in epilepsy. Efficient cognitive performance was characterized by stable and controlled eye movements, whereas impaired performance involved erratic saccade dynamics and prolonged fixations. Importantly, eye tracking parameters provide additional information beyond simple speed measurements, potentially enhancing the differential diagnostic capabilities of the TMT in epilepsy. The observed associations between oculomotor parameters and cognitive performance were not present in the control group, suggesting that these relationships may be specific to epilepsy. Future research should investigate whether both basic and advanced metrics of search strategies are sensitive to disease dynamics and treatment effects in epilepsy.
Greß, Hannah; Demidova, Elena; Meier, Michael; Krüger, Björn
SecureNeuroAI: Advanced Security Framework for AI-Powered Multimodal Real-Time Detection of Medical Seizure Events Proceedings Article
In: Ohm, Marc (Ed.): Proceedings of the 15th graduate workshop of the special interest group Security - Intrusion Detection and Response (SIDAR) of the German Informatics Society (GI) (SPRING 2025), pp. 22-24, GI SIG SIDAR, Nuremberg, April, 2025, ISSN: 2190-846X.
@inproceedings{Greß2025,
title = {SecureNeuroAI: Advanced Security Framework for AI-Powered Multimodal Real-Time Detection of Medical Seizure Events},
author = {Hannah Greß and Elena Demidova and Michael Meier and Björn Krüger},
editor = {Marc Ohm},
url = {https://fg-sidar.gi.de/publikationen/sidar-reports},
issn = {2190-846X},
year = {2025},
date = {2025-05-12},
urldate = {2025-05-12},
booktitle = {Proceedings of the 15th graduate workshop of the special interest group Security - Intrusion Detection and Response (SIDAR) of the German Informatics Society (GI) (SPRING 2025)},
pages = {22-24},
publisher = {GI SIG SIDAR},
address = {Nuremberg},
abstract = {In today's interconnected world, medical devices are increasingly equipped with novel digital technologies and AI-powered methods to improve the users' quality of life.
Despite the increased possibilities and features these devices offer due to the technical progress, cyberattacks on medical devices will increase as well with possibly severe outcomes for the patients.
At the same time, AI-based technologies could help to detect and mitigate these attacks on medical systems and their data in real-time.
Therefore, our project "SecureNeuroAI" aims to detect epileptic seizures using multimodal sensor data and AI models while also considering possible cyberattacks on this system resulting in an IT-secure system.
Our results will serve as an example for future AI-supported medical devices and systems to enhance their security and to strengthen their trustworthiness towards their (future) users.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Despite the increased possibilities and features these devices offer due to the technical progress, cyberattacks on medical devices will increase as well with possibly severe outcomes for the patients.
At the same time, AI-based technologies could help to detect and mitigate these attacks on medical systems and their data in real-time.
Therefore, our project "SecureNeuroAI" aims to detect epileptic seizures using multimodal sensor data and AI models while also considering possible cyberattacks on this system resulting in an IT-secure system.
Our results will serve as an example for future AI-supported medical devices and systems to enhance their security and to strengthen their trustworthiness towards their (future) users.
Khan, Umar; Riaz, Qaiser; Hussain, Mehdi; Zeeshan, Muhammad; Krüger, Björn
Towards Effective Parkinson’s Monitoring: Movement Disorder Detection and Symptom Identification Using Wearable Inertial Sensors Journal Article
In: Algorithms, vol. 18, no. 4, 2025, ISSN: 1999-4893.
@article{2025-khan,
  author    = {Umar Khan and Qaiser Riaz and Mehdi Hussain and Muhammad Zeeshan and Björn Krüger},
  title     = {Towards Effective Parkinson’s Monitoring: Movement Disorder Detection and Symptom Identification Using Wearable Inertial Sensors},
  journal   = {Algorithms},
  volume    = {18},
  number    = {4},
  year      = {2025},
  date      = {2025-04-04},
  urldate   = {2025-01-01},
  url       = {https://www.mdpi.com/1999-4893/18/4/203},
  doi       = {10.3390/a18040203},
  issn      = {1999-4893},
  abstract  = {Parkinson’s disease lacks a cure, yet symptomatic relief can be achieved through various treatments. This study dives into the critical aspect of anomalous event detection in the activities of daily living of patients with Parkinson’s disease and the identification of associated movement disorders, such as tremors, dyskinesia, and bradykinesia. Utilizing the inertial data acquired from the most affected upper limb of the patients, this study aims to create an optimal pipeline for Parkinson’s patient monitoring. This study proposes a two-stage movement disorder detection and classification pipeline for binary classification (normal or anomalous event) and multi-label classification (tremors, dyskinesia, and bradykinesia), respectively. The proposed pipeline employs and evaluates manual feature crafting for classical machine learning algorithms, as well as an RNN-CNN-inspired deep learning model that does not require manual feature crafting. This study also explore three different window sizes for signal segmentation and two different auto-segment labeling approaches for precise and correct labeling of the continuous signal. The performance of the proposed model is validated on a publicly available inertial dataset. Comparisons with existing works reveal the novelty of our approach, covering multiple anomalies (tremors, dyskinesia, and bradykinesia) and achieving 93.03% recall for movement disorder detection (binary) and 91.54% recall for movement disorder classification (multi-label). We believe that the proposed approach will advance the field towards more effective and comprehensive solutions for Parkinson’s detection and symptom classification.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Greß, Hannah; Alouardani, Saied; Hoffmann, Nico; Trebing, Pia; Becker, Albert J.; Surges, Rainer; Pitsch, Julika; Krüger, Björn
Digitale Transformation des Blutprobenmanagements Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{gress2025a,
  author    = {Hannah Greß and Saied Alouardani and Nico Hoffmann and Pia Trebing and Albert J. Becker and Rainer Surges and Julika Pitsch and Björn Krüger},
  title     = {Digitale Transformation des Blutprobenmanagements},
  booktitle = {Dreiländertagung Epilepsie 2025},
  year      = {2025},
  date      = {2025-03-26},
  urldate   = {2025-03-26},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Haaga, Lisa; Jansen, Anna; Steininger, Melissa; Müllers, Johannes; Bausch, Marcel; Jordan, Arthur; Surges, Rainer; Krüger, Björn
EpiEye – Einfluss anfallssupressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{haaga2025a,
  author    = {Lisa Haaga and Anna Jansen and Melissa Steininger and Johannes Müllers and Marcel Bausch and Arthur Jordan and Rainer Surges and Björn Krüger},
  title     = {EpiEye – Einfluss anfallssupressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien},
  booktitle = {Dreiländertagung Epilepsie 2025},
  year      = {2025},
  date      = {2025-03-26},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Müllers, Johannes; Greß, Hannah; Weber, Christian; Nadeem, Mubaris; Hütwohl, Daniela; Pukropski, Jan; Grond, Martin; Surges, Rainer; Krüger, Björn
Aufbau eines epileptologischen Telekonsils zwischen dem Klinikum Siegen und dem Universitätsklinikum Bonn Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{muellers2025a,
  author    = {Johannes Müllers and Hannah Greß and Christian Weber and Mubaris Nadeem and Daniela Hütwohl and Jan Pukropski and Martin Grond and Rainer Surges and Björn Krüger},
  title     = {Aufbau eines epileptologischen Telekonsils zwischen dem Klinikum Siegen und dem Universitätsklinikum Bonn},
  booktitle = {Dreiländertagung Epilepsie 2025},
  year      = {2025},
  date      = {2025-03-26},
  urldate   = {2025-03-26},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Mustafa, Sarah Al-Haj; Jansen, Anna; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
ANNE – Augen-Tracking zur Erkennung von Nebenwirkungen bei Epilepsie Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{alhajmustafa2025anne,
title = {ANNE – Augen-Tracking zur Erkennung von Nebenwirkungen bei Epilepsie},
author = {Sarah Al-Haj Mustafa and Anna Jansen and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
year = {2025},
date = {2025-03-26},
urldate = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Neuß, Maike Susanne; Pitsch, Julika; Krüger, Björn; Becker, Albert J.; Surges, Rainer; Baumgartner, Tobias
Semiologische Charakteristika der Temporallappenepilepsie mit Antikörpern gegen GAD65 Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{neuss2025,
  author    = {Maike Susanne Neuß and Julika Pitsch and Björn Krüger and Albert J. Becker and Rainer Surges and Tobias Baumgartner},
  title     = {Semiologische Charakteristika der Temporallappenepilepsie mit Antikörpern gegen GAD65},
  booktitle = {Dreiländertagung Epilepsie 2025},
  year      = {2025},
  date      = {2025-03-26},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Greß, Hannah; Krüger, Björn; Tischhauser, Elmar
The Newer, the More Secure? Standards-Compliant Bluetooth Low Energy Man-in-the-Middle Attacks on Fitness Trackers Journal Article
In: Sensors, vol. 25, no. 6, 2025, ISSN: 1424-8220.
@article{2025gressBT,
  author    = {Hannah Greß and Björn Krüger and Elmar Tischhauser},
  title     = {The Newer, the More Secure? Standards-Compliant Bluetooth Low Energy Man-in-the-Middle Attacks on Fitness Trackers},
  journal   = {Sensors},
  volume    = {25},
  number    = {6},
  year      = {2025},
  date      = {2025-03-14},
  urldate   = {2025-01-01},
  url       = {https://www.mdpi.com/1424-8220/25/6/1815},
  doi       = {10.3390/s25061815},
  issn      = {1424-8220},
  abstract  = {The trend in self-tracking devices has remained unabated for years. Even if they record a large quantity of sensitive data, most users are not concerned about their data being transmitted and stored in a secure way from the device via the companion app to the vendor’s server. However, the secure implementation of this chain from the manufacturer is not always given, as various publications have already shown. Therefore, we first provide an overview of attack vectors within the ecosystem of self-tracking devices. Second, we evaluate the data security of eight contemporary fitness trackers from leading vendors by applying four still partly standards-compliant Bluetooth Low-Energy Man-in-the-Middle (MitM) attacks. Our results show that the examined devices are partially vulnerable against the attacks. For most of the trackers, the manufacturers put different security measures in place. These include short and user-initiated visibility and connectivity or app-level authentication to limit the attack surface. Interestingly, newer models are more likely to be attackable, underlining the constant need for verifying the security of BLE devices, reporting found vulnerabilities, and also strengthening standards and improving security awareness among manufacturers and users. Therefore, we finish our work with recommendations and best practices for law- and regulation-makers, vendors, and users on how to strengthen the security of BLE devices.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Steininger, Melissa; Jansen, Anna; Welle, Kristian; Krüger, Björn
Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR Conference
2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR), 2025.
@conference{steininger2025b,
  author    = {Melissa Steininger and Anna Jansen and Kristian Welle and Björn Krüger},
  title     = {Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR},
  booktitle = {2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR)},
  year      = {2025},
  date      = {2025-03-12},
  urldate   = {2025-03-12},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Müllers, Johannes; Staehle, Ricarda; Stroth, Sanna; Poustka, Luise; Krüger, Björn; Schulte-Rüther, Martin
Multi-Stream Analysis for Robust Head Gesture Classification in Natural Social Interaction: Leveraging High-Resolution Sensor Data for Optimized Visual Classification Conference
16. Wissenschaftlichen Tagung Autismus-Spektrum (WTAS), 2025.
@conference{muellers2025b,
  author    = {Johannes Müllers and Ricarda Staehle and Sanna Stroth and Luise Poustka and Björn Krüger and Martin Schulte-Rüther},
  title     = {Multi-Stream Analysis for Robust Head Gesture Classification in Natural Social Interaction: Leveraging High-Resolution Sensor Data for Optimized Visual Classification},
  booktitle = {16. Wissenschaftlichen Tagung Autismus-Spektrum (WTAS)},
  year      = {2025},
  date      = {2025-03-07},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Steininger, Melissa; Jansen, Anna; Mustafa, Sarah Al-Haj; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Eye-Tracking Reveals Search Behaviour in Epilepsy Patients Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{steininger2025a,
title = {Eye-Tracking Reveals Search Behaviour in Epilepsy Patients},
author = {Melissa Steininger and Anna Jansen and Sarah Al-Haj Mustafa and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
year = {2025},
date = {2025-03-03},
urldate = {2025-03-03},
booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Jansen, Anna; Steininger, Melissa; Mustafa, Sarah Al-Haj; Müllers, Johannes; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Prediction Models on Eye Tracking Data in Epilepsy Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{jansen2025a,
  author    = {Anna Jansen and Melissa Steininger and Sarah Al-Haj Mustafa and Johannes Müllers and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
  title     = {Prediction Models on Eye Tracking Data in Epilepsy},
  booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
  year      = {2025},
  date      = {2025-03-03},
  urldate   = {2025-03-03},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
2024
Otsubo, Hiromu; Lehnort, Marvin; Steininger, Melissa; Marquardt, Alexander; Dollack, Felix; Hirao, Yutaro; Perusquía-Hernández, Monica; Uchiyama, Hideaki; Kruijff, Ernst; Riecke, Bernhard; Kiyokawa, Kiyoshi
First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality Proceedings Article
In: ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction, pp. 439 - 448, 2024.
@inproceedings{Otsubo2024,
title = {First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality},
author = {Hiromu Otsubo and Marvin Lehnort and Melissa Steininger and Alexander Marquardt and Felix Dollack and Yutaro Hirao and Monica Perusquía-Hernández and Hideaki Uchiyama and Ernst Kruijff and Bernhard Riecke and Kiyoshi Kiyokawa},
doi = {10.1145/3678957.3685753},
year = {2024},
date = {2024-11-04},
urldate = {2024-11-04},
booktitle = {ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction},
pages = {439 - 448},
abstract = {Awe is a complex emotion described as a perception of vastness and a need for accommodation to integrate new, overwhelming experiences. Virtual Reality (VR) has recently gained attention as a convenient means to facilitate experiences of awe. In VR, a first-person perspective might increase awe due to its immersive nature, while a third-person perspective might enhance the perception of vastness. However, the impact of VR perspectives on experiencing awe has not been thoroughly examined. We created two types of VR scenes: one with elements designed to induce high awe, such as a snowy mountain, and a low awe scene without such elements. We compared first-person and third-person perspectives in each scene. Forty-two participants explored the VR scenes, with their physiological responses captured by electrocardiogram (ECG) and face tracking (FT). Subsequently, participants self-reported their experience of awe (AWE-S) and presence (IPQ) within VR. The results revealed that the first-person perspective induced stronger feelings of awe and presence than the third-person perspective. The findings of this study provide useful guidelines for designing VR content that enhances emotional experiences.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Marquardt, Alexander; Lehnort, Marvin; Otsubo, Hiromu; Perusquia-Hernandez, Monica; Steininger, Melissa; Dollack, Felix; Uchiyama, Hideaki; Kiyokawa, Kiyoshi; Kruijff, Ernst
Exploring Gesture Interaction in Underwater Virtual Reality Proceedings Article
In: Proceedings of the 2024 ACM Symposium on Spatial User Interaction, pp. 1-2, 2024.
@inproceedings{marquardtLehnort2024,
title = {Exploring Gesture Interaction in Underwater Virtual Reality},
author = {Alexander Marquardt and Marvin Lehnort and Hiromu Otsubo and Monica Perusquia-Hernandez and Melissa Steininger and Felix Dollack and Hideaki Uchiyama and Kiyoshi Kiyokawa and Ernst Kruijff},
doi = {10.1145/3677386.3688890},
year = {2024},
date = {2024-10-07},
urldate = {2024-10-07},
booktitle = {Proceedings of the 2024 ACM Symposium on Spatial User Interaction},
pages = {1-2},
abstract = {An underwater virtual reality (UVR) system with gesture-based controls was developed to facilitate navigation and interaction while submerged. The system uses a waterproof head-mounted display and camera-based gesture recognition, originally trained for abovewater conditions, employing three gestures: grab for navigation, pinch for single interactions, and point for continuous interactions. In an experimental study, we tested gesture recognition both above and underwater, and evaluated participant interaction within an immersive underwater scene. Results showed that underwater conditions slightly affected gesture accuracy, but the system maintained high performance. Participants reported a strong sense of presence and found the gestures intuitive while highlighting the need for further refinement to address usability challenges.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Müllers, Johannes; Krüger, Björn; Schulte-Rüther, Martin
Gesten und Körperbewegungen entschlüsseln - Automatisierte Analyse mit Hilfe von Sensordaten und Methoden des maschinellen Lernens Conference
XXXVIII. DGKJP Kongress 2024, 2024.
@conference{muellers2024dgkjp,
title = {Gesten und Körperbewegungen entschlüsseln - Automatisierte Analyse mit Hilfe von Sensordaten und Methoden des maschinellen Lernens},
author = {Johannes Müllers and Björn Krüger and Martin Schulte-Rüther},
year = {2024},
date = {2024-09-19},
booktitle = {XXXVIII. DGKJP Kongress 2024},
abstract = {Hintergrund
Subtile Kopfbewegungen wie Nicken, Kopfschütteln und Kopfneigen sind bedeutende nonverbale Kommunikationssignale während einer therapeutischen Sitzung. Diese Bewegungen können leicht übersehen werden, selbst bei detaillierter Videoanalyse. Die Nutzung von Sensordaten, insbesondere die Erfassung von Beschleunigung und Drehrate mittels Accelerometer und Gyroskop, ermöglicht eine präzise Echtzeitanalyse der Kopfbewegungen.
Methode
Zur Analyse der Sensordaten werden verschiedene Ansätze in Betracht gezogen. Klassische analytische Methoden erlauben eine direkte Auswertung der Beschleunigungs- und Rotationsdaten durch festgelegte Schwellenwerte und Mustererkennung. Graphbasierte Ansätze bieten eine flexible Struktur zur Modellierung der Bewegungssequenzen und deren Beziehungen. Zudem können Methoden des maschinellen Lernens, insbesondere überwachte und unüberwachte Lernverfahren, genutzt werden, um komplexe Bewegungsmuster zu identifizieren und zu klassifizieren. In diesem Vortrag werden die Vor- und Nachteile dieser Ansätze diskutiert und verglichen. Ein besonderer Fokus liegt auf der Echtzeitfähigkeit der Methoden, um eine laufende Annotation der Videos zu gewährleisten.
Ergebnisse
Vorläufige Resultate zeigen die Machbarkeit der Sensordatenanalyse. Erste Untersuchungen belegen, dass die erfassten Daten eine detaillierte Erkennung und Differenzierung von Kopfbewegungen ermöglichen.
Diskussion und Schlussfolgerung
Die bisherigen Ergebnisse zeigen, dass Sensordaten und fortschrittliche Analysemethoden das Potenzial haben, die Erkennung und Annotation von Kopfbewegungen erheblich zu verbessern. Analytische Methoden bieten einfache Implementierungsmöglichkeiten, könnten jedoch gegenüber maschinellen Lernmethoden an Anpassungsfähigkeit verlieren. Maschinelles Lernen bietet höhere Genauigkeit, erfordert jedoch umfangreiche Daten und Trainingszeit. Zukünftige Arbeiten werden sich auf die Verfeinerung und Validierung der Methoden konzentrieren, um eine optimale Balance zwischen Genauigkeit, Echtzeitfähigkeit und praktischer Anwendbarkeit zu finden. Insgesamt zeigt sich, dass die Integration von Sensordatenanalyse in therapeutische Sitzungen die Kommunikation und das Verständnis zwischen Therapeut und Patient verbessern kann.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Subtile Kopfbewegungen wie Nicken, Kopfschütteln und Kopfneigen sind bedeutende nonverbale Kommunikationssignale während einer therapeutischen Sitzung. Diese Bewegungen können leicht übersehen werden, selbst bei detaillierter Videoanalyse. Die Nutzung von Sensordaten, insbesondere die Erfassung von Beschleunigung und Drehrate mittels Accelerometer und Gyroskop, ermöglicht eine präzise Echtzeitanalyse der Kopfbewegungen.
Methode
Zur Analyse der Sensordaten werden verschiedene Ansätze in Betracht gezogen. Klassische analytische Methoden erlauben eine direkte Auswertung der Beschleunigungs- und Rotationsdaten durch festgelegte Schwellenwerte und Mustererkennung. Graphbasierte Ansätze bieten eine flexible Struktur zur Modellierung der Bewegungssequenzen und deren Beziehungen. Zudem können Methoden des maschinellen Lernens, insbesondere überwachte und unüberwachte Lernverfahren, genutzt werden, um komplexe Bewegungsmuster zu identifizieren und zu klassifizieren. In diesem Vortrag werden die Vor- und Nachteile dieser Ansätze diskutiert und verglichen. Ein besonderer Fokus liegt auf der Echtzeitfähigkeit der Methoden, um eine laufende Annotation der Videos zu gewährleisten.
Ergebnisse
Vorläufige Resultate zeigen die Machbarkeit der Sensordatenanalyse. Erste Untersuchungen belegen, dass die erfassten Daten eine detaillierte Erkennung und Differenzierung von Kopfbewegungen ermöglichen.
Diskussion und Schlussfolgerung
Die bisherigen Ergebnisse zeigen, dass Sensordaten und fortschrittliche Analysemethoden das Potenzial haben, die Erkennung und Annotation von Kopfbewegungen erheblich zu verbessern. Analytische Methoden bieten einfache Implementierungsmöglichkeiten, könnten jedoch gegenüber maschinellen Lernmethoden an Anpassungsfähigkeit verlieren. Maschinelles Lernen bietet höhere Genauigkeit, erfordert jedoch umfangreiche Daten und Trainingszeit. Zukünftige Arbeiten werden sich auf die Verfeinerung und Validierung der Methoden konzentrieren, um eine optimale Balance zwischen Genauigkeit, Echtzeitfähigkeit und praktischer Anwendbarkeit zu finden. Insgesamt zeigt sich, dass die Integration von Sensordatenanalyse in therapeutische Sitzungen die Kommunikation und das Verständnis zwischen Therapeut und Patient verbessern kann.
Steininger, Melissa; Perusquía-Hernández, Monica; Marquardt, Alexander; Otsubo, Hiromu; Lehnort, Marvin; Dollack, Felix; Kiyokawa, Kiyoshi; Kruijff, Ernst; Riecke, Bernhard
Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality Conference
12th International Conference on Affective Computing and Intelligent Interaction, 2024.
@conference{steininger2024,
title = {Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality},
author = {Melissa Steininger and Monica Perusquía-Hernández and Alexander Marquardt and Hiromu Otsubo and Marvin Lehnort and Felix Dollack and Kiyoshi Kiyokawa and Ernst Kruijff and Bernhard Riecke},
year = {2024},
date = {2024-09-17},
urldate = {2024-09-17},
booktitle = {12th International Conference on Affective Computing and Intelligent Interaction},
abstract = {Awe is an emotion characterized by the perception of vastness and the need to accommodate this vastness into one’s mental framework. We propose an elicitation scene to induce awe in Virtual Reality (VR), validate it through self-report, and explore the feasibility of using skin conductance to predict self-reported awe and vastness as labeled from the stimuli in VR. Sixty-two participants took part in the study comparing the awe-eliciting space scene and a neutral scene. The space scene was confirmed as more awe-eliciting. A k-nearest neighbor algorithm confirmed high and low-awe score clusters used to label the data. A Random Forest algorithm achieved 65% accuracy (F1 = 0.56, AUC = 0.73) when predicting the self-reported low and high awe categories from continuous skin conductance data. A similar approach achieved 55% accuracy (F1 = 0.59, AUC = 0.56) when predicting the perception of vastness. These results underscore the potential of skin-conductance-based algorithms to predict awe.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Kiran, Samia; Riaz, Qaiser; Hussain, Mehdi; Zeeshan, Muhammad; Krüger, Björn
Unveiling Fall Origins: Leveraging Wearable Sensors to Detect Pre-Impact Fall Causes Journal Article
In: IEEE Sensors Journal, vol. 24, no. 15, pp. 24086-24095, 2024, ISSN: 1558-1748.
@article{10552639,
title = {Unveiling Fall Origins: Leveraging Wearable Sensors to Detect Pre-Impact Fall Causes},
author = {Samia Kiran and Qaiser Riaz and Mehdi Hussain and Muhammad Zeeshan and Björn Krüger},
doi = {10.1109/JSEN.2024.3407835},
issn = {1558-1748},
year = {2024},
date = {2024-08-01},
urldate = {2024-01-01},
journal = {IEEE Sensors Journal},
volume = {24},
number = {15},
pages = {24086-24095},
abstract = {Falling poses a significant challenge to the health and well-being of the elderly and people with various disabilities. Precise and prompt fall detection plays a crucial role in preventing falls and mitigating the impact of injuries. In this research, we propose a deep classifier for pre-impact fall detection which can detect a fall in the pre-impact phase with an inference time of 46–52 milliseconds. The proposed classifier is an ensemble of Convolutional Neural Networks (CNNs) and Bidirectional Gated Recurrent Units (BiGRU) with residual connections. We validated the performance of the proposed classifier on a comprehensive, publicly available pre-impact fall dataset. The dataset covers 36 diverse activities, including 15 types of fall-related activities and 21 types of activities of daily living (ADLs). Furthermore, we evaluated the proposed model using three different inputs of varying dimensions: 6D input (comprising 3D accelerations and 3D angular velocities), 3D input (3D accelerations), and 1D input (magnitude of 3D accelerations). The reduction in the input space from 6D to 1D is aimed at minimizing the computation cost. We have attained commendable results outperforming the state-of-the-art approaches by achieving an average accuracy and F1 score of 98% for 6D input size. The potential implications of this research are particularly relevant in the realm of smart healthcare, with a focus on the elderly and differently-abled population.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Krüger, Björn; Weber, Christian; Müllers, Johannes; Greß, Hannah; Beyer, Franziska; Knaub, Jessica; Pukropski, Jan; Hütwohl, Daniela; Hahn, Kai; Grond, Martin; Jonas, Stephan; Surges, Rainer
Teleconsultation to Improve Epilepsy Diagnosis and Therapy Book Chapter
In: Herrmann, Wolfram J.; Leser, Ulf; Möller, Sebastian; Voigt-Antons, Jan-Niklas; Gellert, Paul (Ed.): pp. 18-23, Future-Proofing Healthcare for Older Adults Through Digitalization, 2024.
@inbook{krueger2024a,
title = {Teleconsultation to Improve Epilepsy Diagnosis and Therapy},
author = {Björn Krüger and Christian Weber and Johannes Müllers and Hannah Greß and Franziska Beyer and Jessica Knaub and Jan Pukropski and Daniela Hütwohl and Kai Hahn and Martin Grond and Stephan Jonas and Rainer Surges},
editor = {Wolfram J. Herrmann and Ulf Leser and Sebastian Möller and Jan-Niklas Voigt-Antons and Paul Gellert},
doi = {10.14279/depositonce-20417},
year = {2024},
date = {2024-08-01},
urldate = {2024-08-01},
booktitle = {Future-Proofing Healthcare for Older Adults Through Digitalization},
pages = {18--23},
abstract = {Teleconsultation in epileptology significantly enhances patient diagnosis and treatment, often eliminating the necessity for physical referral to a specialized clinic. In this paper, we detail the typical teleconsultation process, exploring its technical requirements and legal boundaries. Notably, we focus on the groundwork for establishing a teleconsultation specifically between the University Hospital Bonn and the Klinikum Siegen. Additionally, we provide an overview of currently implemented teleconsultations in epileptology in Germany, concluding with research questions stemming from these advancements.},
keywords = {},
pubstate = {published},
tppubtype = {inbook}
}
Riedlinger, Dorothee; Krüger, Björn; Winkler, Hanna; Deutschbein, Johannes; Fischer-Rosinský, Antje; Slagman, Anna; Möckel, Martin
Development of an early warning system to identify patients at risk of falling – Combining the analysis of medication prescription data and movement profiles Book Chapter
In: Herrmann, Wolfram J.; Leser, Ulf; Möller, Sebastian; Voigt-Antons, Jan-Niklas; Gellert, Paul (Ed.): Future-Proofing Healthcare for Older Adults Through Digitalization, pp. 108–113, 2024.
@inbook{riedlinger2024,
title = {Development of an early warning system to identify patients at risk of falling – Combining the analysis of medication prescription data and movement profiles},
author = {Dorothee Riedlinger and Björn Krüger and Hanna Winkler and Johannes Deutschbein and Antje Fischer-Rosinský and Anna Slagman and Martin Möckel},
editor = {Wolfram J. Herrmann and Ulf Leser and Sebastian Möller and Jan-Niklas Voigt-Antons and Paul Gellert},
doi = {10.14279/depositonce-20431},
year = {2024},
date = {2024-08-01},
urldate = {2024-08-01},
booktitle = {Future-Proofing Healthcare for Older Adults Through Digitalization},
pages = {108--113},
abstract = {Fall related injuries are a common cause for a reduction of autonomy and quality of life in older patients. The early detection of patients at risk of falling or the prediction of falls may help to prevent falls and thereby improve the health of people of advanced age. Prior analyses of routine medication data pointed to an increase of pain medication prescription prior to an ED },
keywords = {},
pubstate = {published},
tppubtype = {inbook}
}