2025
Greß, Hannah; Alouardani, Saied; Hoffmann, Nico; Trebing, Pia; Becker, Albert J.; Surges, Rainer; Pitsch, Julika; Krüger, Björn
Digitale Transformation des Blutprobenmanagements Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{gress2025a,
title = {Digitale Transformation des Blutprobenmanagements},
author = {Hannah Greß and Saied Alouardani and Nico Hoffmann and Pia Trebing and Albert J. Becker and Rainer Surges and Julika Pitsch and Björn Krüger},
year = {2025},
date = {2025-03-26},
urldate = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Haaga, Lisa; Jansen, Anna; Steininger, Melissa; Müllers, Johannes; Bausch, Marcel; Jordan, Arthur; Surges, Rainer; Krüger, Björn
EpiEye – Einfluss anfallssuppressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{haaga2025a,
title = {EpiEye – Einfluss anfallssuppressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien},
author = {Lisa Haaga and Anna Jansen and Melissa Steininger and Johannes Müllers and Marcel Bausch and Arthur Jordan and Rainer Surges and Björn Krüger},
year = {2025},
date = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Müllers, Johannes; Greß, Hannah; Weber, Christian; Nadeem, Mubaris; Hütwohl, Daniela; Pukropski, Jan; Grond, Martin; Surges, Rainer; Krüger, Björn
Aufbau eines epileptologischen Telekonsils zwischen dem Klinikum Siegen und dem Universitätsklinikum Bonn Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{muellers2025a,
title = {Aufbau eines epileptologischen Telekonsils zwischen dem Klinikum Siegen und dem Universitätsklinikum Bonn},
author = {Johannes Müllers and Hannah Greß and Christian Weber and Mubaris Nadeem and Daniela Hütwohl and Jan Pukropski and Martin Grond and Rainer Surges and Björn Krüger},
year = {2025},
date = {2025-03-26},
urldate = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Mustafa, Sarah Al-Haj; Jansen, Anna; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
ANNE – Augen-Tracking zur Erkennung von Nebenwirkungen bei Epilepsie Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{mustafa2025a,
title = {ANNE – Augen-Tracking zur Erkennung von Nebenwirkungen bei Epilepsie},
author = {Sarah Al-Haj Mustafa and Anna Jansen and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
year = {2025},
date = {2025-03-26},
urldate = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Neuß, Maike Susanne; Pitsch, Julika; Krüger, Björn; Becker, Albert J.; Surges, Rainer; Baumgartner, Tobias
Semiologische Charakteristika der Temporallappenepilepsie mit Antikörpern gegen GAD65 Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{neuss2025,
title = {Semiologische Charakteristika der Temporallappenepilepsie mit Antikörpern gegen GAD65},
author = {Maike Susanne Neuß and Julika Pitsch and Björn Krüger and Albert J. Becker and Rainer Surges and Tobias Baumgartner},
year = {2025},
date = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Steininger, Melissa; Jansen, Anna; Welle, Kristian; Krüger, Björn
Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR Conference Forthcoming
2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR), Forthcoming.
@conference{steininger2025b,
title = {Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR},
author = {Melissa Steininger and Anna Jansen and Kristian Welle and Björn Krüger},
year = {2025},
date = {2025-03-12},
urldate = {2025-03-12},
booktitle = {2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR)},
keywords = {},
pubstate = {forthcoming},
tppubtype = {conference}
}
Müllers, Johannes; Staehle, Ricarda; Stroth, Sanna; Poustka, Luise; Krüger, Björn; Schulte-Rüther, Martin
Multi-Stream Analysis for Robust Head Gesture Classification in Natural Social Interaction: Leveraging High-Resolution Sensor Data for Optimized Visual Classification Conference
16. Wissenschaftliche Tagung Autismus-Spektrum (WTAS), 2025.
@conference{muellers2025b,
title = {Multi-Stream Analysis for Robust Head Gesture Classification in Natural Social Interaction: Leveraging High-Resolution Sensor Data for Optimized Visual Classification},
author = {Johannes Müllers and Ricarda Staehle and Sanna Stroth and Luise Poustka and Björn Krüger and Martin Schulte-Rüther},
year = {2025},
date = {2025-03-07},
booktitle = {16. Wissenschaftliche Tagung Autismus-Spektrum (WTAS)},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Steininger, Melissa; Jansen, Anna; Mustafa, Sarah Al-Haj; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Eye-Tracking Reveals Search Behaviour in Epilepsy Patients Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{steininger2025a,
title = {Eye-Tracking Reveals Search Behaviour in Epilepsy Patients},
author = {Melissa Steininger and Anna Jansen and Sarah Al-Haj Mustafa and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
year = {2025},
date = {2025-03-03},
urldate = {2025-03-03},
booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Jansen, Anna; Steininger, Melissa; Mustafa, Sarah Al-Haj; Müllers, Johannes; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Prediction Models on Eye Tracking Data in Epilepsy Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{jansen2025a,
title = {Prediction Models on Eye Tracking Data in Epilepsy},
author = {Anna Jansen and Melissa Steininger and Sarah Al-Haj Mustafa and Johannes Müllers and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
year = {2025},
date = {2025-03-03},
urldate = {2025-03-03},
booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
2024
Otsubo, Hiromu; Lehnort, Marvin; Steininger, Melissa; Marquardt, Alexander; Dollack, Felix; Hirao, Yutaro; Perusquía-Hernández, Monica; Uchiyama, Hideaki; Kruijff, Ernst; Riecke, Bernhard; Kiyokawa, Kiyoshi
First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality Proceedings Article
In: ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction, pp. 439-448, 2024.
@inproceedings{Otsubo2024,
title = {First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality},
author = {Hiromu Otsubo and Marvin Lehnort and Melissa Steininger and Alexander Marquardt and Felix Dollack and Yutaro Hirao and Monica Perusquía-Hernández and Hideaki Uchiyama and Ernst Kruijff and Bernhard Riecke and Kiyoshi Kiyokawa},
doi = {10.1145/3678957.3685753},
year = {2024},
date = {2024-11-04},
urldate = {2024-11-04},
booktitle = {ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction},
pages = {439-448},
abstract = {Awe is a complex emotion described as a perception of vastness and a need for accommodation to integrate new, overwhelming experiences. Virtual Reality (VR) has recently gained attention as a convenient means to facilitate experiences of awe. In VR, a first-person perspective might increase awe due to its immersive nature, while a third-person perspective might enhance the perception of vastness. However, the impact of VR perspectives on experiencing awe has not been thoroughly examined. We created two types of VR scenes: one with elements designed to induce high awe, such as a snowy mountain, and a low awe scene without such elements. We compared first-person and third-person perspectives in each scene. Forty-two participants explored the VR scenes, with their physiological responses captured by electrocardiogram (ECG) and face tracking (FT). Subsequently, participants self-reported their experience of awe (AWE-S) and presence (IPQ) within VR. The results revealed that the first-person perspective induced stronger feelings of awe and presence than the third-person perspective. The findings of this study provide useful guidelines for designing VR content that enhances emotional experiences.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Marquardt, Alexander; Lehnort, Marvin; Otsubo, Hiromu; Perusquía-Hernández, Monica; Steininger, Melissa; Dollack, Felix; Uchiyama, Hideaki; Kiyokawa, Kiyoshi; Kruijff, Ernst
Exploring Gesture Interaction in Underwater Virtual Reality Proceedings Article
In: Proceedings of the 2024 ACM Symposium on Spatial User Interaction, pp. 1-2, 2024.
@inproceedings{marquardtLehnort2024,
title = {Exploring Gesture Interaction in Underwater Virtual Reality},
author = {Alexander Marquardt and Marvin Lehnort and Hiromu Otsubo and Monica Perusquía-Hernández and Melissa Steininger and Felix Dollack and Hideaki Uchiyama and Kiyoshi Kiyokawa and Ernst Kruijff},
doi = {10.1145/3677386.3688890},
year = {2024},
date = {2024-10-07},
urldate = {2024-10-07},
booktitle = {Proceedings of the 2024 ACM Symposium on Spatial User Interaction},
pages = {1-2},
abstract = {An underwater virtual reality (UVR) system with gesture-based controls was developed to facilitate navigation and interaction while submerged. The system uses a waterproof head-mounted display and camera-based gesture recognition, originally trained for above-water conditions, employing three gestures: grab for navigation, pinch for single interactions, and point for continuous interactions. In an experimental study, we tested gesture recognition both above and underwater, and evaluated participant interaction within an immersive underwater scene. Results showed that underwater conditions slightly affected gesture accuracy, but the system maintained high performance. Participants reported a strong sense of presence and found the gestures intuitive while highlighting the need for further refinement to address usability challenges.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Müllers, Johannes; Krüger, Björn; Schulte-Rüther, Martin
Gesten und Körperbewegungen entschlüsseln - Automatisierte Analyse mit Hilfe von Sensordaten und Methoden des maschinellen Lernens Conference
XXXVIII. DGKJP Kongress 2024, 2024.
@conference{muellers2024b,
title = {Gesten und Körperbewegungen entschlüsseln - Automatisierte Analyse mit Hilfe von Sensordaten und Methoden des maschinellen Lernens},
author = {Johannes Müllers and Björn Krüger and Martin Schulte-Rüther},
year = {2024},
date = {2024-09-19},
booktitle = {XXXVIII. DGKJP Kongress 2024},
abstract = {Hintergrund
Subtile Kopfbewegungen wie Nicken, Kopfschütteln und Kopfneigen sind bedeutende nonverbale Kommunikationssignale während einer therapeutischen Sitzung. Diese Bewegungen können leicht übersehen werden, selbst bei detaillierter Videoanalyse. Die Nutzung von Sensordaten, insbesondere die Erfassung von Beschleunigung und Drehrate mittels Accelerometer und Gyroskop, ermöglicht eine präzise Echtzeitanalyse der Kopfbewegungen.
Methode
Zur Analyse der Sensordaten werden verschiedene Ansätze in Betracht gezogen. Klassische analytische Methoden erlauben eine direkte Auswertung der Beschleunigungs- und Rotationsdaten durch festgelegte Schwellenwerte und Mustererkennung. Graphbasierte Ansätze bieten eine flexible Struktur zur Modellierung der Bewegungssequenzen und deren Beziehungen. Zudem können Methoden des maschinellen Lernens, insbesondere überwachte und unüberwachte Lernverfahren, genutzt werden, um komplexe Bewegungsmuster zu identifizieren und zu klassifizieren. In diesem Vortrag werden die Vor- und Nachteile dieser Ansätze diskutiert und verglichen. Ein besonderer Fokus liegt auf der Echtzeitfähigkeit der Methoden, um eine laufende Annotation der Videos zu gewährleisten.
Ergebnisse
Vorläufige Resultate zeigen die Machbarkeit der Sensordatenanalyse. Erste Untersuchungen belegen, dass die erfassten Daten eine detaillierte Erkennung und Differenzierung von Kopfbewegungen ermöglichen.
Diskussion und Schlussfolgerung
Die bisherigen Ergebnisse zeigen, dass Sensordaten und fortschrittliche Analysemethoden das Potenzial haben, die Erkennung und Annotation von Kopfbewegungen erheblich zu verbessern. Analytische Methoden bieten einfache Implementierungsmöglichkeiten, könnten jedoch gegenüber maschinellen Lernmethoden an Anpassungsfähigkeit verlieren. Maschinelles Lernen bietet höhere Genauigkeit, erfordert jedoch umfangreiche Daten und Trainingszeit. Zukünftige Arbeiten werden sich auf die Verfeinerung und Validierung der Methoden konzentrieren, um eine optimale Balance zwischen Genauigkeit, Echtzeitfähigkeit und praktischer Anwendbarkeit zu finden. Insgesamt zeigt sich, dass die Integration von Sensordatenanalyse in therapeutische Sitzungen die Kommunikation und das Verständnis zwischen Therapeut und Patient verbessern kann.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Steininger, Melissa; Perusquía-Hernández, Monica; Marquardt, Alexander; Otsubo, Hiromu; Lehnort, Marvin; Dollack, Felix; Kiyokawa, Kiyoshi; Kruijff, Ernst; Riecke, Bernhard
Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality Conference
12th International Conference on Affective Computing and Intelligent Interaction, 2024.
@conference{steininger2024,
title = {Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality},
author = {Melissa Steininger and Monica Perusquía-Hernández and Alexander Marquardt and Hiromu Otsubo and Marvin Lehnort and Felix Dollack and Kiyoshi Kiyokawa and Ernst Kruijff and Bernhard Riecke},
year = {2024},
date = {2024-09-17},
urldate = {2024-09-17},
booktitle = {12th International Conference on Affective Computing and Intelligent Interaction},
abstract = {Awe is an emotion characterized by the perception of vastness and the need to accommodate this vastness into one’s mental framework. We propose an elicitation scene to induce awe in Virtual Reality (VR), validate it through self-report, and explore the feasibility of using skin conductance to predict self-reported awe and vastness as labeled from the stimuli in VR. Sixty-two participants took part in the study comparing the awe-eliciting space scene and a neutral scene. The space scene was confirmed as more awe-eliciting. A k-nearest neighbor algorithm confirmed high and low-awe score clusters used to label the data. A Random Forest algorithm achieved 65% accuracy (F1 = 0.56, AUC = 0.73) when predicting the self-reported low and high awe categories from continuous skin conductance data. A similar approach achieved 55% accuracy (F1 = 0.59, AUC = 0.56) when predicting the perception of vastness. These results underscore the potential of skin-conductance-based algorithms to predict awe.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Kiran, Samia; Riaz, Qaiser; Hussain, Mehdi; Zeeshan, Muhammad; Krüger, Björn
Unveiling Fall Origins: Leveraging Wearable Sensors to Detect Pre-Impact Fall Causes Journal Article
In: IEEE Sensors Journal, vol. 24, no. 15, pp. 24086-24095, 2024, ISSN: 1558-1748.
@article{10552639,
title = {Unveiling Fall Origins: Leveraging Wearable Sensors to Detect Pre-Impact Fall Causes},
author = {Samia Kiran and Qaiser Riaz and Mehdi Hussain and Muhammad Zeeshan and Björn Krüger},
doi = {10.1109/JSEN.2024.3407835},
issn = {1558-1748},
year = {2024},
date = {2024-08-01},
urldate = {2024-01-01},
journal = {IEEE Sensors Journal},
volume = {24},
number = {15},
pages = {24086-24095},
abstract = {Falling poses a significant challenge to the health and well-being of the elderly and people with various disabilities. Precise and prompt fall detection plays a crucial role in preventing falls and mitigating the impact of injuries. In this research, we propose a deep classifier for pre-impact fall detection which can detect a fall in the pre-impact phase with an inference time of 46–52 milliseconds. The proposed classifier is an ensemble of Convolutional Neural Networks (CNNs) and Bidirectional Gated Recurrent Units (BiGRU) with residual connections. We validated the performance of the proposed classifier on a comprehensive, publicly available pre-impact fall dataset. The dataset covers 36 diverse activities, including 15 types of fall-related activities and 21 types of activities of daily living (ADLs). Furthermore, we evaluated the proposed model using three different inputs of varying dimensions: 6D input (comprising 3D accelerations and 3D angular velocities), 3D input (3D accelerations), and 1D input (magnitude of 3D accelerations). The reduction in the input space from 6D to 1D is aimed at minimizing the computation cost. We have attained commendable results outperforming the state-of-the-art approaches by achieving an average accuracy and F1 score of 98% for 6D input size. The potential implications of this research are particularly relevant in the realm of smart healthcare, with a focus on the elderly and differently-abled population.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Krüger, Björn; Weber, Christian; Müllers, Johannes; Greß, Hannah; Beyer, Franziska; Knaub, Jessica; Pukropski, Jan; Hütwohl, Daniela; Hahn, Kai; Grond, Martin; Jonas, Stephan; Surges, Rainer
Teleconsultation to Improve Epilepsy Diagnosis and Therapy Book Chapter
In: Herrmann, Wolfram J.; Leser, Ulf; Möller, Sebastian; Voigt-Antons, Jan-Niklas; Gellert, Paul (Ed.): pp. 18-23, Future-Proofing Healthcare for Older Adults Through Digitalization, 2024.
@inbook{krueger2024a,
title = {Teleconsultation to Improve Epilepsy Diagnosis and Therapy},
author = {Björn Krüger and Christian Weber and Johannes Müllers and Hannah Greß and Franziska Beyer and Jessica Knaub and Jan Pukropski and Daniela Hütwohl and Kai Hahn and Martin Grond and Stephan Jonas and Rainer Surges},
editor = {Wolfram J. Herrmann and Ulf Leser and Sebastian Möller and Jan-Niklas Voigt-Antons and Paul Gellert},
doi = {10.14279/depositonce-20417},
year = {2024},
date = {2024-08-01},
urldate = {2024-08-01},
pages = {18-23},
edition = {Future-Proofing Healthcare for Older Adults Through Digitalization},
abstract = {Teleconsultation in epileptology significantly enhances patient diagnosis and treatment, often eliminating the necessity for physical referral to a specialized clinic. In this paper, we detail the typical teleconsultation process, exploring its technical requirements and legal boundaries. Notably, we focus on the groundwork for establishing a teleconsultation specifically between the University Hospital Bonn and the Klinikum Siegen. Additionally, we provide an overview of currently implemented teleconsultations in epileptology in Germany, concluding with research questions stemming from these advancements. },
keywords = {},
pubstate = {published},
tppubtype = {inbook}
}
Riedlinger, Dorothee; Krüger, Björn; Winkler, Hanna; Deutschbein, Johannes; Fischer-Rosinský, Antje; Slagman, Anna; Möckel, Martin
Development of an early warning system to identify patients at risk of falling – Combining the analysis of medication prescription data and movement profiles Book Chapter
In: Herrmann, Wolfram J.; Leser, Ulf; Möller, Sebastian; Voigt-Antons, Jan-Niklas; Gellert, Paul (Ed.): pp. 108-113, Future-Proofing Healthcare for Older Adults Through Digitalization, 2024.
@inbook{riedlinger2024,
title = {Development of an early warning system to identify patients at risk of falling – Combining the analysis of medication prescription data and movement profiles},
author = {Dorothee Riedlinger and Björn Krüger and Hanna Winkler and Johannes Deutschbein and Antje Fischer-Rosinský and Anna Slagman and Martin Möckel},
editor = {Wolfram J. Herrmann and Ulf Leser and Sebastian Möller and Jan-Niklas Voigt-Antons and Paul Gellert},
doi = {10.14279/depositonce-20431},
year = {2024},
date = {2024-08-01},
urldate = {2024-08-01},
pages = {108-113},
edition = {Future-Proofing Healthcare for Older Adults Through Digitalization},
abstract = {Fall related injuries are a common cause for a reduction of autonomy and quality of life in older patients. The early detection of patients at risk of falling or the prediction of falls may help to prevent falls and thereby improve the health of people of advanced age. Prior analyses of routine medication data pointed to an increase of pain medication prescription prior to an ED },
keywords = {},
pubstate = {published},
tppubtype = {inbook}
}
Greß, Hannah; Krüger, Björn
Security of Bluetooth-capable devices in the healthcare sector Proceedings Article
In: Ohm, Marc (Ed.): pp. 13-14, GI SIG SIDAR, Bonn, Germany, 2024, ISSN: 2190-846X.
@inproceedings{greß2024,
title = {Security of Bluetooth-capable devices in the healthcare sector},
author = {Hannah Greß and Björn Krüger},
editor = {Marc Ohm},
url = {https://fg-sidar.gi.de/publikationen/sidar-reports},
issn = {2190-846X},
year = {2024},
date = {2024-06-30},
urldate = {2024-06-30},
booktitle = {Proceedings of the 14th graduate workshop of the special interest group Security - Intrusion Detection and Response (SIDAR) of the German Informatics Society (GI) (SPRING 2024)},
pages = {13-14},
publisher = {GI SIG SIDAR},
address = {Bonn, Germany},
abstract = {The steady growth of Internet of Medical Things (IoMT) devices collecting, storing and transmitting sensitive data, mostly over Bluetooth Low Energy (BLE), increases also the demand to test them regarding their security. Therefore, this work aims to give an overview of already existing Bluetooth pentesting tools and frameworks, BLE specific attacks and their countermeasures as well as to develop a framework which implements all of these to fasten the security testing process of IoMT wearables.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Müllers, Johannes; Greß, Hannah; Haaga, Lisa; Krüger, Björn
Sensorik am Krankenbett – Synchrone Datenakquise für Studien in der Epileptologie Conference
Clinical Epileptology, vol. 37 (Suppl 1), 2024.
@conference{muellers2024,
title = {Sensorik am Krankenbett – Synchrone Datenakquise für Studien in der Epileptologie},
author = {Johannes Müllers and Hannah Greß and Lisa Haaga and Björn Krüger},
doi = {10.1007/s10309-024-00672-x},
year = {2024},
date = {2024-04-18},
urldate = {2024-04-18},
booktitle = {Clinical Epileptology},
issuetitle = {Abstracts zur 62. Jahrestagung der Deutschen Gesellschaft für Epileptologie},
volume = {37 (Suppl 1)},
pages = {1–73},
abstract = {Die Möglichkeit der Anfallserkennung oder -vorhersage außerhalb des Krankenhauses kann die Lebensqualität und das Sicherheitsbedürfnis von Epilepsiepatienten erhöhen. Die Überwachung von Vitalparametern, Bewegungen und weiteren Messgrößen kann von einer Vielzahl von Wearables oder sonstigen neuartigen Sensorsystemen gewährleistet werden. Videoüberwachte EEG-Messplätze dienen als Goldstandard und werden für Studien mit solchen Sensoren genutzt, um Korrelationen festzustellen. Hierbei stellen technische Herausforderungen ein wiederkehrendes Problem dar. Neben der Inbetriebnahme der Sensorsysteme, die ohne informationstechnische Kenntnisse oft nur mit proprietären Mitteln möglich ist, ist insbesondere die Synchronizität zur EEG-Aufzeichnung anspruchsvoll. Aktuelle Vorbereitungen einer Studie mit Eye-Tracker Brillen bieten den Anlass, ein neues System zur Datenakquisition aufzubauen. },
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Marquardt, Alexander; Steininger, Melissa; Trepkowski, Christina; Weier, Martin; Kruijff, Ernst
Selection Performance and Reliability of Eye and Head Gaze Tracking Under Varying Light Conditions Conference
2024 IEEE Conference Virtual Reality and 3D User Interfaces (VR), 2024.
@conference{marquardt2024,
title = {Selection Performance and Reliability of Eye and Head Gaze Tracking Under Varying Light Conditions},
author = {Alexander Marquardt and Melissa Steininger and Christina Trepkowski and Martin Weier and Ernst Kruijff},
doi = {10.1109/VR58804.2024.00075},
year = {2024},
date = {2024-04-15},
urldate = {2024-04-15},
booktitle = {2024 IEEE Conference Virtual Reality and 3D User Interfaces (VR)},
abstract = {Augmented Reality (AR) applications increasingly rely on eye and head gaze tracking for user interaction, with their efficacy influenced by environmental factors such as spatial arrangements and lighting conditions. This paper presents two studies that examine how these variables affect the performance of eye and head gaze tracking in AR environments. While eye tracking partially delivered faster results, its performance exhibited greater variability, especially under dynamic lighting conditions. Conversely, head gaze tracking, while providing more consistent results, showed a notable reduction in accuracy in environments with fluctuating light levels. Furthermore, the spatial properties of the environment had notable implications on both tracking methods. Our research demonstrates that both spatial properties and lighting conditions are key determinants in the choice of a tracking method, underscoring the need for AR systems that can dynamically adapt to these environmental variables.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Schulte-Rüther, Martin; Lemken, Johannes; Krüger, Björn; Greß, Hannah; Stroth, Sanna; Kamp-Becker, Inge; Poustka, Luise
Automated annotation and quantification of non-verbal behavior from eye tracking and accelerometer data during live social interaction Conference
Wissenschaftliche Tagung Autismus-Spektrum (WTAS), 2024.
@conference{schulteruether2024,
title = {Automated annotation and quantification of non-verbal behavior from eye tracking and accelerometer data during live social interaction},
author = {Martin Schulte-Rüther and Johannes Lemken and Björn Krüger and Hannah Greß and Sanna Stroth and Inge Kamp-Becker and Luise Poustka},
year = {2024},
date = {2024-03-21},
booktitle = {Wissenschaftliche Tagung Autismus-Spektrum (WTAS)},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hamza, Kiran; Riaz, Qaiser; Imran, Hamza Ali; Hussain, Mehdi; Krüger, Björn
Generisch-Net: A Generic Deep Model for Analyzing Human Motion with Wearable Sensors in the Internet of Health Things Journal Article
In: Sensors, vol. 24, no. 19, 2024, ISSN: 1424-8220.
@article{s24196167,
title = {Generisch-Net: A Generic Deep Model for Analyzing Human Motion with Wearable Sensors in the Internet of Health Things},
author = {Kiran Hamza and Qaiser Riaz and Hamza Ali Imran and Mehdi Hussain and Björn Krüger},
url = {https://www.mdpi.com/1424-8220/24/19/6167},
doi = {10.3390/s24196167},
issn = {1424-8220},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {Sensors},
volume = {24},
number = {19},
abstract = {The Internet of Health Things (IoHT) is a broader version of the Internet of Things. The main goal is to intervene autonomously from geographically diverse regions and provide low-cost preventative or active healthcare treatments. Smart wearable IMUs for human motion analysis have proven to provide valuable insights into a person’s psychological state, activities of daily living, identification/re-identification through gait signatures, etc. The existing literature, however, focuses on specificity i.e., problem-specific deep models. This work presents a generic BiGRU-CNN deep model that can predict the emotional state of a person, classify the activities of daily living, and re-identify a person in a closed-loop scenario. For training and validation, we have employed publicly available and closed-access datasets. The data were collected with wearable inertial measurement units mounted non-invasively on the bodies of the subjects. Our findings demonstrate that the generic model achieves an impressive accuracy of 96.97% in classifying activities of daily living. Additionally, it re-identifies individuals in closed-loop scenarios with an accuracy of 93.71% and estimates emotional states with an accuracy of 78.20%. This study represents a significant effort towards developing a versatile deep-learning model for human motion analysis using wearable IMUs, demonstrating promising results across multiple applications.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2023
Krüger, Björn; Weber, Christian; Greß, Hannah; Knaub, Jessica; Pukropski, Jan; Hütwohl, Daniela; Hahn, Kai; Grond, Martin; Jonas, Stephan; Surges, Rainer
Telekonsil zur Verbesserung der Epilepsiediagnostik Conference
Digitalisierung der Gesundheitsversorgung älterer Menschen, 2023.
@conference{krueger2023,
title = {Telekonsil zur Verbesserung der Epilepsiediagnostik},
author = {Björn Krüger and Christian Weber and Hannah Greß and Jessica Knaub and Jan Pukropski and Daniela Hütwohl and Kai Hahn and Martin Grond and Stephan Jonas and Rainer Surges},
year = {2023},
date = {2023-07-01},
urldate = {2023-07-01},
booktitle = {Digitalisierung der Gesundheitsversorgung älterer Menschen},
abstract = {Erfolgreiche Diagnose von Epilepsien bedürfen einer engen Zusammenarbeit von Hausärzt:innen, Kinderärzt:innen und neurologischen und epileptologischen Fachärzt:innen sowie den entsprechenden Fachkliniken. Zusätzlich zu der Expertise der Ärzt:innen ist ein fortlaufender Austausch und die kontinuierliche Anreicherung von Fachwissen von Bedeutung. Neben der frühzeitigen Überweisung an Fachkliniken kann die gemeinsame Fall-begleitende und Klinikübergreifende Aufnahme, der Austausch und die Pflege von Falldokumentationen ein wichtiger Baustein für die langfristige und erfolgreiche Begleitung der Patient:innen sein. },
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Riedlinger, Dorothee; Krüger, Björn; Sydow, Hanna; Deutschbein, Johannes; Fischer-Rosinský, Antje; Slagman, Anna; Möckel, Martin
Ein Frühwarnsystem für Stürze – Identifikation von sturzgefährdeten Patient:innen durch die kombinierte Analyse von Medikamenten-Verordnungsdaten und Bewegungsprofilen Conference
Digitalisierung der Gesundheitsversorgung älterer Menschen, 2023.
@conference{riedlinger2023,
title = {Ein Frühwarnsystem für Stürze – Identifikation von sturzgefährdeten Patient:innen durch die kombinierte Analyse von Medikamenten-Verordnungsdaten und Bewegungsprofilen},
author = {Dorothee Riedlinger and Björn Krüger and Hanna Sydow and Johannes Deutschbein and Antje Fischer-Rosinský and Anna Slagman and Martin Möckel},
year = {2023},
date = {2023-07-01},
booktitle = {Digitalisierung der Gesundheitsversorgung älterer Menschen},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Schulte-Rüther, Martin; Krüger, Björn; Lemken, Johannes; Greß, Hannah; Stroth, Sanna; Kamp-Becker, Inge; Poustka, Luise
Automatic Delineation and Classification of Head Movements Using 3D Accelerometer Data from Live Social Interaction Conference
2023.
@conference{schulteruether2023,
title = {Automatic Delineation and Classification of Head Movements Using 3D Accelerometer Data from Live Social Interaction},
author = {Martin Schulte-Rüther and Björn Krüger and Johannes Lemken and Hannah Greß and Sanna Stroth and Inge Kamp-Becker and Luise Poustka},
url = {https://cdn.ymaws.com/www.autism-insar.org/resource/resmgr/docs/annualmeeting/insar_2023_full_abstract_boo.pdf, INSAR 2023 Abstract Book (p. 1246)},
year = {2023},
date = {2023-05-06},
urldate = {2023-05-06},
abstract = {Individuals with autism spectrum disorder (ASD) often show reduced non-verbal communication during social interactive encounters, e.g. facial expressions, deictic and communicative gestures, eye gaze. This includes reduced usage and expression of gestures of the head, such as nodding, shaking the head, and head turning. Diagnostic criteria of ASD suggest that reduced non-verbal communicative behavior is an important symptom, but current diagnostic tools are restricted to subjective, clinical evaluation.
While many tools for automatic delineation and classification of facial expressions and gestures from video data are available, less work has been done with respect to the use of accelerometer sensor data. Considering the current lack of objective, quantitative measures of non-verbal behavior during social interaction, an automated analysis pipeline for movement annotation from accelerometer data would be helpful for both clinical evaluation and research.
},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Lemken, Johannes; Krüger, Björn; Stroth, Sanna; Kamp-Becker, Inge; Gail, A.; Poustka, Luise; Schulte-Rüther, Martin
The relationship between eye gaze and engagement in dyadic conversations – a semi-automatic analysis using unobtrusive eye tracking glasses Conference
Wissenschaftliche Tagung Autismus-Spektrum, 2023.
@conference{lemken2023,
title = {The relationship between eye gaze and engagement in dyadic conversations – a semi-automatic analysis using unobtrusive eye tracking glasses},
author = {Johannes Lemken and Björn Krüger and Sanna Stroth and Inge Kamp-Becker and A. Gail and Luise Poustka and Martin Schulte-Rüther},
year = {2023},
date = {2023-03-03},
booktitle = {Wissenschaftliche Tagung Autismus-Spektrum},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
2025
Greß, Hannah; Alouardani, Saied; Hoffmann, Nico; Trebing, Pia; Becker, Albert J.; Surges, Rainer; Pitsch, Julika; Krüger, Björn
Digitale Transformation des Blutprobenmanagements Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{gress2025a,
title = {Digitale Transformation des Blutprobenmanagements},
author = {Hannah Greß and Saied Alouardani and Nico Hoffmann and Pia Trebing and Albert J. Becker and Rainer Surges and Julika Pitsch and Björn Krüger},
year = {2025},
date = {2025-03-26},
urldate = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Haaga, Lisa; Jansen, Anna; Steininger, Melissa; Müllers, Johannes; Bausch, Marcel; Jordan, Arthur; Surges, Rainer; Krüger, Björn
EpiEye – Einfluss anfallssupressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{haaga2025a,
title = {EpiEye – Einfluss anfallssupressiver Medikamente auf Augenbewegungen und autonome Veränderungen bei Epilepsien},
author = {Lisa Haaga and Anna Jansen and Melissa Steininger and Johannes Müllers and Marcel Bausch and Arthur Jordan and Rainer Surges and Björn Krüger},
year = {2025},
date = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Müllers, Johannes; Greß, Hannah; Weber, Christian; Nadeem, Mubaris; Hütwohl, Daniela; Pukropski, Jan; Grond, Martin; Surges, Rainer; Krüger, Björn
Aufbau eines epileptologischen Telekonsils zwischen dem Klinikum Siegen und dem Universitätsklinikum Bonn Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{muellers2025a,
title = {Aufbau eines epileptologischen Telekonsils zwischen dem Klinikum Siegen und dem Universitätsklinikum Bonn},
author = {Johannes Müllers and Hannah Greß and Christian Weber and Mubaris Nadeem and Daniela Hütwohl and Jan Pukropski and Martin Grond and Rainer Surges and Björn Krüger},
year = {2025},
date = {2025-03-26},
urldate = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Mustafa, Sarah Al-Haj; Jansen, Anna; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
ANNE – Augen-Tracking zur Erkennung von Nebenwirkungen bei Epilepsie Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{nokey,
title = {ANNE – Augen-Tracking zur Erkennung von Nebenwirkungen bei Epilepsie},
author = {Sarah Al-Haj Mustafa and Anna Jansen and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
year = {2025},
date = {2025-03-26},
urldate = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Neuß, Maike Susanne; Pitsch, Julika; Krüger, Björn; Becker, Albert J.; Surges, Rainer; Baumgartner, Tobias
Semiologische Charakteristika der Temporallappenepilepsie mit Antikörpern gegen GAD65 Conference
Dreiländertagung Epilepsie 2025, 2025.
@conference{neuss2025,
title = {Semiologische Charakteristika der Temporallappenepilepsie mit Antikörpern gegen GAD65},
author = {Maike Susanne Neuß and Julika Pitsch and Björn Krüger and Albert J. Becker and Rainer Surges and Tobias Baumgartner},
year = {2025},
date = {2025-03-26},
booktitle = {Dreiländertagung Epilepsie 2025},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Steininger, Melissa; Jansen, Anna; Welle, Kristian; Krüger, Björn
Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR Conference Forthcoming
2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR), Forthcoming.
@conference{steininger2025b,
title = {Optimized Sensor Position Detection: Improving Visual Sensor Setups for Hand Tracking in VR},
author = {Melissa Steininger and Anna Jansen and Kristian Welle and Björn Krüger},
year = {2025},
date = {2025-03-12},
urldate = {2025-03-12},
booktitle = {2025 IEEE Conference Virtual Reality and 3D User Interfaces (VR)},
keywords = {},
pubstate = {forthcoming},
tppubtype = {conference}
}
Müllers, Johannes; Staehle, Ricarda; Stroth, Sanna; Poustka, Luise; Krüger, Björn; Schulte-Rüther, Martin
Multi-Stream Analysis for Robust Head Gesture Classification in Natural Social Interaction: Leveraging High-Resolution Sensor Data for Optimized Visual Classification Conference
16. Wissenschaftlichen Tagung Autismus-Spektrum (WTAS), 2025.
@conference{muellers2025b,
title = {Multi-Stream Analysis for Robust Head Gesture Classification in Natural Social Interaction: Leveraging High-Resolution Sensor Data for Optimized Visual Classification},
author = {Johannes Müllers and Ricarda Staehle and Sanna Stroth and Luise Poustka and Björn Krüger and Martin Schulte-Rüther},
year = {2025},
date = {2025-03-07},
booktitle = {16. Wissenschaftlichen Tagung Autismus-Spektrum (WTAS)},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Steininger, Melissa; Jansen, Anna; Mustafa, Sarah Al-Haj; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Eye-Tracking Reveals Search Behaviour in Epilepsy Patients Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{steininger2025a,
title = {Eye-Tracking Reveals Search Behaviour in Epilepsy Patients},
author = {Melissa Steininger and Anna Jansen and Sarah Al-Haj Mustafa and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger
},
year = {2025},
date = {2025-03-03},
urldate = {2025-03-03},
booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
journal = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Jansen, Anna; Steininger, Melissa; Mustafa, Sarah Al-Haj; Müllers, Johannes; Surges, Rainer; Helmstaedter, Christoph; von Wrede, Randi; Krüger, Björn
Prediction Models on Eye Tracking Data in Epilepsy Conference
3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders, 2025.
@conference{jansen2025a,
title = {Prediction Models on Eye Tracking Data in Epilepsy},
author = {Anna Jansen and Melissa Steininger and Sarah Al-Haj Mustafa and Johannes Müllers and Rainer Surges and Christoph Helmstaedter and Randi von Wrede and Björn Krüger},
year = {2025},
date = {2025-03-03},
urldate = {2025-03-03},
booktitle = {3rd International Conference on Artificial Intelligence in Epilepsy and Neurological Disorders},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
2024
Otsubo, Hiromu; Lehnort, Marvin; Steininger, Melissa; Marquardt, Alexander; Dollack, Felix; Hirao, Yutaro; Perusquía-Hernández, Monica; Uchiyama, Hideaki; Kruijff, Ernst; Riecke, Bernhard; Kiyokawa, Kiyoshi
First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality Proceedings Article
In: ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction, pp. 439 - 448, 2024.
@inproceedings{Otsubo2024,
title = {First-Person Perspective Induces Stronger Feelings of Awe and Presence Compared to Third-Person Perspective in Virtual Reality},
author = {Hiromu Otsubo and Marvin Lehnort and Melissa Steininger and Alexander Marquardt and Felix Dollack and Yutaro Hirao and Monica Perusquía-Hernández and Hideaki Uchiyama and Ernst Kruijff and Bernhard Riecke and Kiyoshi Kiyokawa},
doi = {https://doi.org/10.1145/3678957.3685753},
year = {2024},
date = {2024-11-04},
urldate = {2024-11-04},
booktitle = {ICMI '24: Proceedings of the 26th International Conference on Multimodal Interaction},
pages = {439 - 448},
abstract = {Awe is a complex emotion described as a perception of vastness and a need for accommodation to integrate new, overwhelming experiences. Virtual Reality (VR) has recently gained attention as a convenient means to facilitate experiences of awe. In VR, a first-person perspective might increase awe due to its immersive nature, while a third-person perspective might enhance the perception of vastness. However, the impact of VR perspectives on experiencing awe has not been thoroughly examined. We created two types of VR scenes: one with elements designed to induce high awe, such as a snowy mountain, and a low awe scene without such elements. We compared first-person and third-person perspectives in each scene. Forty-two participants explored the VR scenes, with their physiological responses captured by electrocardiogram (ECG) and face tracking (FT). Subsequently, participants self-reported their experience of awe (AWE-S) and presence (IPQ) within VR. The results revealed that the first-person perspective induced stronger feelings of awe and presence than the third-person perspective. The findings of this study provide useful guidelines for designing VR content that enhances emotional experiences.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Marquardt, Alexander; Lehnort, Marvin; Otsubo, Hiromu; Perusquia-Hernandez, Monica; Steininger, Melissa; Dollack, Felix; Uchiyama, Hideaki; Kiyokawa, Kiyoshi; Kruijff, Ernst
Exploring Gesture Interaction in Underwater Virtual Reality Proceedings Article
In: Proceedings of the 2024 ACM Symposium on Spatial User Interaction, pp. 1-2, 2024.
@inproceedings{marquardtLehnort2024,
title = {Exploring Gesture Interaction in Underwater Virtual Reality},
author = {Alexander Marquardt and Marvin Lehnort and Hiromu Otsubo and Monica Perusquia-Hernandez and Melissa Steininger and Felix Dollack and Hideaki Uchiyama and Kiyoshi Kiyokawa and Ernst Kruijff},
doi = {https://doi.org/10.1145/3677386.3688890},
year = {2024},
date = {2024-10-07},
urldate = {2024-10-07},
booktitle = {Proceedings of the 2024 ACM Symposium on Spatial User Interaction},
pages = {1-2},
abstract = {An underwater virtual reality (UVR) system with gesture-based controls was developed to facilitate navigation and interaction while submerged. The system uses a waterproof head-mounted display and camera-based gesture recognition, originally trained for abovewater conditions, employing three gestures: grab for navigation, pinch for single interactions, and point for continuous interactions. In an experimental study, we tested gesture recognition both above and underwater, and evaluated participant interaction within an immersive underwater scene. Results showed that underwater conditions slightly affected gesture accuracy, but the system maintained high performance. Participants reported a strong sense of presence and found the gestures intuitive while highlighting the need for further refinement to address usability challenges.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
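The three gestures named in the abstract above (grab for navigation, pinch for one-shot interactions, point for continuous interactions) map naturally onto a small event dispatcher. The following Python sketch is an illustrative reconstruction of that mapping, not code from the paper; the gesture event structure and state fields are assumptions.

import dataclasses

@dataclasses.dataclass
class GestureEvent:
    name: str        # "grab", "pinch" or "point" (names from the abstract)
    active: bool     # True while the gesture is held

def dispatch(event: GestureEvent, state: dict) -> None:
    if event.name == "grab":
        state["navigating"] = event.active       # steer while grab is held
    elif event.name == "pinch" and event.active:
        state["selections"] += 1                 # fires once per pinch
    elif event.name == "point":
        state["interacting"] = event.active      # continuous while pointing

state = {"navigating": False, "selections": 0, "interacting": False}
for e in [GestureEvent("grab", True), GestureEvent("pinch", True),
          GestureEvent("pinch", False), GestureEvent("grab", False)]:
    dispatch(e, state)
print(state)  # {'navigating': False, 'selections': 1, 'interacting': False}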
Müllers, Johannes; Krüger, Björn; Schulte-Rüther, Martin
Gesten und Körperbewegungen entschlüsseln - Automatisierte Analyse mit Hilfe von Sensordaten und Methoden des maschinellen Lernens Conference
XXXVIII. DGKJP Kongress 2024, 2024.
@conference{muellers2024dgkjp,
title = {Gesten und Körperbewegungen entschlüsseln - Automatisierte Analyse mit Hilfe von Sensordaten und Methoden des maschinellen Lernens},
author = {Johannes Müllers and Björn Krüger and Martin Schulte-Rüther},
year = {2024},
date = {2024-09-19},
booktitle = {XXXVIII. DGKJP Kongress 2024},
abstract = {Hintergrund
Subtile Kopfbewegungen wie Nicken, Kopfschütteln und Kopfneigen sind bedeutende nonverbale Kommunikationssignale während einer therapeutischen Sitzung. Diese Bewegungen können leicht übersehen werden, selbst bei detaillierter Videoanalyse. Die Nutzung von Sensordaten, insbesondere die Erfassung von Beschleunigung und Drehrate mittels Accelerometer und Gyroskop, ermöglicht eine präzise Echtzeitanalyse der Kopfbewegungen.
Methode
Zur Analyse der Sensordaten werden verschiedene Ansätze in Betracht gezogen. Klassische analytische Methoden erlauben eine direkte Auswertung der Beschleunigungs- und Rotationsdaten durch festgelegte Schwellenwerte und Mustererkennung. Graphbasierte Ansätze bieten eine flexible Struktur zur Modellierung der Bewegungssequenzen und deren Beziehungen. Zudem können Methoden des maschinellen Lernens, insbesondere überwachte und unüberwachte Lernverfahren, genutzt werden, um komplexe Bewegungsmuster zu identifizieren und zu klassifizieren. In diesem Vortrag werden die Vor- und Nachteile dieser Ansätze diskutiert und verglichen. Ein besonderer Fokus liegt auf der Echtzeitfähigkeit der Methoden, um eine laufende Annotation der Videos zu gewährleisten.
Ergebnisse
Vorläufige Resultate zeigen die Machbarkeit der Sensordatenanalyse. Erste Untersuchungen belegen, dass die erfassten Daten eine detaillierte Erkennung und Differenzierung von Kopfbewegungen ermöglichen.
Diskussion und Schlussfolgerung
Die bisherigen Ergebnisse zeigen, dass Sensordaten und fortschrittliche Analysemethoden das Potenzial haben, die Erkennung und Annotation von Kopfbewegungen erheblich zu verbessern. Analytische Methoden bieten einfache Implementierungsmöglichkeiten, könnten jedoch gegenüber maschinellen Lernmethoden an Anpassungsfähigkeit verlieren. Maschinelles Lernen bietet höhere Genauigkeit, erfordert jedoch umfangreiche Daten und Trainingszeit. Zukünftige Arbeiten werden sich auf die Verfeinerung und Validierung der Methoden konzentrieren, um eine optimale Balance zwischen Genauigkeit, Echtzeitfähigkeit und praktischer Anwendbarkeit zu finden. Insgesamt zeigt sich, dass die Integration von Sensordatenanalyse in therapeutische Sitzungen die Kommunikation und das Verständnis zwischen Therapeut und Patient verbessern kann.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
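As a rough illustration of the classical threshold-based approach weighed against machine learning in the abstract above, the sketch below distinguishes nods from head shakes by counting above-threshold sign changes on the dominant gyroscope axis. The axis convention, threshold value, and synthetic test signal are assumptions for illustration, not values from the talk.

import numpy as np

def classify_head_movement(gyro: np.ndarray, threshold: float = 0.5) -> str:
    """gyro: (N, 3) angular velocity in rad/s; columns = (pitch, yaw, roll)."""
    def oscillations(axis: np.ndarray) -> int:
        # Count sign changes among samples exceeding the noise threshold.
        active = axis[np.abs(axis) > threshold]
        return int(np.sum(np.diff(np.sign(active)) != 0)) if active.size else 0
    nod = oscillations(gyro[:, 0])      # pitch oscillation -> nodding
    shake = oscillations(gyro[:, 1])    # yaw oscillation -> head shaking
    if max(nod, shake) < 2:             # require repeated oscillation
        return "none"
    return "nod" if nod >= shake else "shake"

# Synthetic check: a 1 s, 2 Hz nod sampled at 100 Hz.
t = np.linspace(0, 1, 100)
fake = np.stack([1.2 * np.sin(4 * np.pi * t), 0.05 * t, 0 * t], axis=1)
print(classify_head_movement(fake))     # -> "nod"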
Steininger, Melissa; Perusquía-Hernández, Monica; Marquardt, Alexander; Otsubo, Hiromu; Lehnort, Marvin; Dollack, Felix; Kiyokawa, Kiyoshi; Kruijff, Ernst; Riecke, Bernhard
Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality Conference
12th International Conference on Affective Computing and Intelligent Interaction, 2024.
@conference{steininger2024,
title = {Using Skin Conductance to Predict Awe and Perceived Vastness in Virtual Reality},
author = {Melissa Steininger and Monica Perusquía-Hernández and Alexander Marquardt and Hiromu Otsubo and Marvin Lehnort and Felix Dollack and Kiyoshi Kiyokawa and Ernst Kruijff and Bernhard Riecke},
year = {2024},
date = {2024-09-17},
urldate = {2024-09-17},
booktitle = {12th International Conference on Affective Computing and Intelligent Interaction},
abstract = {Awe is an emotion characterized by the perception of vastness and the need to accommodate this vastness into one’s mental framework. We propose an elicitation scene to induce awe in Virtual Reality (VR), validate it through self-report, and explore the feasibility of using skin conductance to predict self-reported awe and vastness as labeled from the stimuli in VR. Sixty-two participants took part in the study comparing the awe-eliciting space scene and a neutral scene. The space scene was confirmed as more awe-eliciting. A k-nearest neighbor algorithm confirmed high- and low-awe score clusters used to label the data. A Random Forest algorithm achieved 65% accuracy (F1 = 0.56, AUC = 0.73) when predicting the self-reported low and high awe categories from continuous skin conductance data. A similar approach achieved 55% accuracy (F1 = 0.59, AUC = 0.56) when predicting the perception of vastness. These results underscore the potential of skin-conductance-based algorithms to predict awe.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
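A minimal sketch of the kind of pipeline the abstract above describes: summary features computed over skin-conductance windows, fed to a scikit-learn Random Forest and scored with F1. The feature set, window length, and the random stand-in data are assumptions; the study's actual features and preprocessing may differ.

import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score

rng = np.random.default_rng(0)

def eda_features(window: np.ndarray) -> list:
    # Simple tonic/phasic summary statistics per window (illustrative choice).
    return [window.mean(), window.std(), np.ptp(window),
            np.mean(np.maximum(np.diff(window), 0))]  # mean positive slope

# Stand-in data: 100 windows of skin conductance with binary awe labels.
windows = rng.normal(size=(100, 128))
labels = rng.integers(0, 2, size=100)

X = np.array([eda_features(w) for w in windows])
clf = RandomForestClassifier(n_estimators=200, random_state=0)
print(cross_val_score(clf, X, labels, cv=5, scoring="f1").mean())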
Kiran, Samia; Riaz, Qaiser; Hussain, Mehdi; Zeeshan, Muhammad; Krüger, Björn
Unveiling Fall Origins: Leveraging Wearable Sensors to Detect Pre-Impact Fall Causes Journal Article
In: IEEE Sensors Journal, vol. 24, no. 15, pp. 24086-24095, 2024, ISSN: 1558-1748.
@article{10552639,
title = {Unveiling Fall Origins: Leveraging Wearable Sensors to Detect Pre-Impact Fall Causes},
author = {Samia Kiran and Qaiser Riaz and Mehdi Hussain and Muhammad Zeeshan and Björn Krüger},
doi = {10.1109/JSEN.2024.3407835},
issn = {1558-1748},
year = {2024},
date = {2024-08-01},
urldate = {2024-01-01},
journal = {IEEE Sensors Journal},
volume = {24},
number = {15},
pages = {24086-24095},
abstract = {Falling poses a significant challenge to the health and well-being of the elderly and people with various disabilities. Precise and prompt fall detection plays a crucial role in preventing falls and mitigating the impact of injuries. In this research, we propose a deep classifier for pre-impact fall detection which can detect a fall in the pre-impact phase with an inference time of 46–52 milliseconds. The proposed classifier is an ensemble of Convolutional Neural Networks (CNNs) and Bidirectional Gated Recurrent Units (BiGRU) with residual connections. We validated the performance of the proposed classifier on a comprehensive, publicly available pre-impact fall dataset. The dataset covers 36 diverse activities, including 15 types of fall-related activities and 21 types of activities of daily living (ADLs). Furthermore, we evaluated the proposed model using three different inputs of varying dimensions: 6D input (comprising 3D accelerations and 3D angular velocities), 3D input (3D accelerations), and 1D input (magnitude of 3D accelerations). The reduction in the input space from 6D to 1D is aimed at minimizing the computation cost. We have attained commendable results outperforming the state-of-the-art approaches by achieving an average accuracy and F1 score of 98% for 6D input size. The potential implications of this research are particularly relevant in the realm of smart healthcare, with a focus on the elderly and differently-abled population.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
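The abstract above names the key ingredients: CNNs combined with a bidirectional GRU and residual connections, operating on 6D IMU windows (or reduced 3D/1D inputs). The PyTorch sketch below shows one plausible arrangement under those constraints; layer sizes, window length, and the binary fall/ADL output are assumptions, and the paper's actual ensemble may be structured differently.

import torch
import torch.nn as nn

class CnnBiGru(nn.Module):
    def __init__(self, in_channels=6, hidden=64, num_classes=2):
        super().__init__()
        self.conv1 = nn.Conv1d(in_channels, hidden, kernel_size=5, padding=2)
        self.conv2 = nn.Conv1d(hidden, hidden, kernel_size=5, padding=2)
        self.gru = nn.GRU(hidden, hidden, batch_first=True, bidirectional=True)
        self.head = nn.Linear(2 * hidden, num_classes)

    def forward(self, x):                      # x: (batch, time, channels)
        h = x.transpose(1, 2)                  # -> (batch, channels, time)
        h1 = torch.relu(self.conv1(h))
        h2 = self.conv2(h1)
        h = torch.relu(h1 + h2)                # residual connection
        out, _ = self.gru(h.transpose(1, 2))   # -> (batch, time, 2*hidden)
        return self.head(out[:, -1])           # logits: fall vs. ADL

model = CnnBiGru()                             # 6D input: 3D acc + 3D gyro
window = torch.randn(8, 128, 6)                # batch of IMU windows
print(model(window).shape)                     # torch.Size([8, 2])

The 6D-to-1D reduction mentioned in the abstract corresponds to replacing the six input channels with the Euclidean norm of the 3D acceleration, i.e. constructing the model with in_channels=1.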
Krüger, Björn; Weber, Christian; Müllers, Johannes; Greß, Hannah; Beyer, Franziska; Knaub, Jessica; Pukropski, Jan; Hütwohl, Daniela; Hahn, Kai; Grond, Martin; Jonas, Stephan; Surges, Rainer
Teleconsultation to Improve Epilepsy Diagnosis and Therapy Book Chapter
In: Herrmann, Wolfram J.; Leser, Ulf; Möller, Sebastian; Voigt-Antons, Jan-Niklas; Gellert, Paul (Ed.): Future-Proofing Healthcare for Older Adults Through Digitalization, pp. 18-23, 2024.
@inbook{krueger2024a,
title = {Teleconsultation to Improve Epilepsy Diagnosis and Therapy},
author = {Björn Krüger and Christian Weber and Johannes Müllers and Hannah Greß and Franziska Beyer and Jessica Knaub and Jan Pukropski and Daniela Hütwohl and Kai Hahn and Martin Grond and Stephan Jonas and Rainer Surges},
editor = {Wolfram J. Herrmann and Ulf Leser and Sebastian Möller and Jan-Niklas Voigt-Antons and Paul Gellert},
doi = {10.14279/depositonce-20417},
year = {2024},
date = {2024-08-01},
urldate = {2024-08-01},
pages = {18-23},
edition = {Future-Proofing Healthcare for Older Adults Through Digitalization},
abstract = {Teleconsultation in epileptology significantly enhances patient diagnosis and treatment, often eliminating the necessity for physical referral to a specialized clinic. In this paper, we detail the typical teleconsultation process, exploring its technical requirements and legal boundaries. Notably, we focus on the groundwork for establishing a teleconsultation specifically between the University Hospital Bonn and the Klinikum Siegen. Additionally, we provide an overview of currently implemented teleconsultations in epileptology in Germany, concluding with research questions stemming from these advancements. },
keywords = {},
pubstate = {published},
tppubtype = {inbook}
}
Riedlinger, Dorothee; Krüger, Björn; Winkler, Hanna; Deutschbein, Johannes; Fischer-Rosinský, Antje; Slagman, Anna; Möckel, Martin
Development of an early warning system to identify patients at risk of falling – Combining the analysis of medication prescription data and movement profiles Book Chapter
In: Herrmann, Wolfram J.; Leser, Ulf; Möller, Sebastian; Voigt-Antons, Jan-Niklas; Gellert, Paul (Ed.): Future-Proofing Healthcare for Older Adults Through Digitalization, pp. 108-113, 2024.
@inbook{riedlinger2024,
title = {Development of an early warning system to identify patients at risk of falling – Combining the analysis of medication prescription data and movement profiles},
author = {Dorothee Riedlinger and Björn Krüger and Hanna Winkler and Johannes Deutschbein and Antje Fischer-Rosinský and Anna Slagman and Martin Möckel},
editor = {Wolfram J. Herrmann and Ulf Leser and Sebastian Möller and Jan-Niklas Voigt-Antons and Paul Gellert},
doi = {10.14279/depositonce-20431},
year = {2024},
date = {2024-08-01},
urldate = {2024-08-01},
pages = {108-113},
edition = {Future-Proofing Healthcare for Older Adults Through Digitalization},
abstract = {Fall related injuries are a common cause for a reduction of autonomy and quality of life in older patients. The early detection of patients at risk of falling or the prediction of falls may help to prevent falls and thereby improve the health of people of advanced age. Prior analyses of routine medication data pointed to an increase of pain medication prescription prior to an ED },
keywords = {},
pubstate = {published},
tppubtype = {inbook}
}
Greß, Hannah; Krüger, Björn
Security of Bluetooth-capable devices in the healthcare sector Proceedings Article
In: Ohm, Marc (Ed.): Proceedings of the 14th graduate workshop of the special interest group Security - Intrusion Detection and Response (SIDAR) of the German Informatics Society (GI) (SPRING 2024), pp. 13-14, GI SIG SIDAR, Bonn, Germany, 2024, ISSN: 2190-846X.
@inproceedings{greß2024,
title = {Security of Bluetooth-capable devices in the healthcare sector},
author = {Hannah Greß and Björn Krüger},
editor = {Marc Ohm},
url = {https://fg-sidar.gi.de/publikationen/sidar-reports},
issn = {2190-846X},
year = {2024},
date = {2024-06-30},
urldate = {2024-06-30},
booktitle = {Proceedings of the 14th graduate workshop of the special interest group Security - Intrusion Detection and Response (SIDAR) of the German Informatics Society (GI) (SPRING 2024)},
pages = {13-14},
publisher = {GI SIG SIDAR},
address = {Bonn, Germany},
abstract = {The steady growth of Internet of Medical Things (IoMT) devices that collect, store and transmit sensitive data, mostly over Bluetooth Low Energy (BLE), also increases the demand to test their security. This work therefore aims to give an overview of existing Bluetooth pentesting tools and frameworks as well as BLE-specific attacks and their countermeasures, and to develop a framework that implements all of these to speed up the security testing of IoMT wearables.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Müllers, Johannes; Greß, Hannah; Haaga, Lisa; Krüger, Björn
Sensorik am Krankenbett – Synchrone Datenakquise für Studien in der Epileptologie Conference
Clinical Epileptology, vol. 37 (Suppl 1), 2024.
@conference{muellers2024,
title = {Sensorik am Krankenbett – Synchrone Datenakquise für Studien in der Epileptologie},
author = {Johannes Müllers and Hannah Greß and Lisa Haaga and Björn Krüger},
doi = {10.1007/s10309-024-00672-x},
year = {2024},
date = {2024-04-18},
urldate = {2024-04-18},
booktitle = {Clinical Epileptology},
issuetitle = {Abstracts zur 62. Jahrestagung der Deutschen Gesellschaft für Epileptologie},
volume = {37 (Suppl 1)},
pages = {1–73},
abstract = {Die Möglichkeit der Anfallserkennung oder -vorhersage außerhalb des Krankenhauses kann die Lebensqualität und das Sicherheitsbedürfnis von Epilepsiepatienten erhöhen. Die Überwachung von Vitalparametern, Bewegungen und weiteren Messgrößen kann von einer Vielzahl von Wearables oder sonstigen neuartigen Sensorsystemen gewährleistet werden. Videoüberwachte EEG-Messplätze dienen als Goldstandard und werden für Studien mit solchen Sensoren genutzt, um Korrelationen festzustellen. Hierbei stellen technische Herausforderungen ein wiederkehrendes Problem dar. Neben der Inbetriebnahme der Sensorsysteme, die ohne informationstechnische Kenntnisse oft nur mit proprietären Mitteln möglich ist, ist insbesondere die Synchronizität zur EEG-Aufzeichnung anspruchsvoll. Aktuelle Vorbereitungen einer Studie mit Eye-Tracker-Brillen bieten den Anlass, ein neues System zur Datenakquisition aufzubauen.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
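One common way to approach the synchronisation problem raised in the abstract above is to estimate offset and drift of the wearable's clock relative to the EEG clock from a few paired sync events (e.g. a marker recorded on both systems), then remap all sensor timestamps. The sketch below assumes such paired markers exist; the study's actual acquisition setup is not described at this level of detail.

import numpy as np

def fit_clock_map(t_sensor: np.ndarray, t_eeg: np.ndarray):
    """Least-squares fit of t_eeg ≈ a * t_sensor + b (offset + linear drift)."""
    a, b = np.polyfit(t_sensor, t_eeg, deg=1)
    return lambda t: a * t + b

# Paired sync markers recorded on both clocks (seconds, illustrative values):
t_sensor = np.array([12.0, 312.4, 611.9, 905.2])
t_eeg = np.array([10.5, 310.95, 610.52, 903.88])

to_eeg = fit_clock_map(t_sensor, t_eeg)
print(to_eeg(450.0))  # sensor time 450 s expressed on the EEG clock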
Marquardt, Alexander; Steininger, Melissa; Trepkowski, Christina; Weier, Martin; Kruijff, Ernst
Selection Performance and Reliability of Eye and Head Gaze Tracking Under Varying Light Conditions Conference
2024 IEEE Conference Virtual Reality and 3D User Interfaces (VR), 2024.
@conference{marquardt2024,
title = {Selection Performance and Reliability of Eye and Head Gaze Tracking Under Varying Light Conditions},
author = {Alexander Marquardt and Melissa Steininger and Christina Trepkowski and Martin Weier and Ernst Kruijff},
doi = {10.1109/VR58804.2024.00075},
year = {2024},
date = {2024-04-15},
urldate = {2024-04-15},
booktitle = {2024 IEEE Conference Virtual Reality and 3D User Interfaces (VR)},
abstract = {Augmented Reality (AR) applications increasingly rely on eye and head gaze tracking for user interaction, with their efficacy influenced by environmental factors such as spatial arrangements and lighting conditions. This paper presents two studies that examine how these variables affect the performance of eye and head gaze tracking in AR environments. While eye tracking partially delivered faster results, its performance exhibited greater variability, especially under dynamic lighting conditions. Conversely, head gaze tracking, while providing more consistent results, showed a notable reduction in accuracy in environments with fluctuating light levels. Furthermore, the spatial properties of the environment had notable implications on both tracking methods. Our research demonstrates that both spatial properties and lighting conditions are key determinants in the choice of a tracking method, underscoring the need for AR systems that can dynamically adapt to these environmental variables.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Schulte-Rüther, Martin; Lemken, Johannes; Krüger, Björn; Greß, Hannah; Stroth, Sanna; Kamp-Becker, Inge; Poustka, Luise
Automated annotation and quantification of non-verbal behavior from eye tracking and accelerometer data during live social interaction Conference
Wissenschaftliche Tagung Autismus-Spektrum (WTAS), 2024.
@conference{schulteruether2024,
title = {Automated annotation and quantification of non-verbal behavior from eye tracking and accelerometer data during live social interaction},
author = {Martin Schulte-Rüther and Johannes Lemken and Björn Krüger and Hannah Greß and Sanna Stroth and Inge Kamp-Becker and Luise Poustka},
year = {2024},
date = {2024-03-21},
booktitle = {Wissenschaftliche Tagung Autismus-Spektrum (WTAS)},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hamza, Kiran; Riaz, Qaiser; Imran, Hamza Ali; Hussain, Mehdi; Krüger, Björn
Generisch-Net: A Generic Deep Model for Analyzing Human Motion with Wearable Sensors in the Internet of Health Things Journal Article
In: Sensors, vol. 24, no. 19, 2024, ISSN: 1424-8220.
@article{s24196167,
title = {Generisch-Net: A Generic Deep Model for Analyzing Human Motion with Wearable Sensors in the Internet of Health Things},
author = {Kiran Hamza and Qaiser Riaz and Hamza Ali Imran and Mehdi Hussain and Björn Krüger},
url = {https://www.mdpi.com/1424-8220/24/19/6167},
doi = {10.3390/s24196167},
issn = {1424-8220},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {Sensors},
volume = {24},
number = {19},
abstract = {The Internet of Health Things (IoHT) is a broader version of the Internet of Things. The main goal is to intervene autonomously from geographically diverse regions and provide low-cost preventative or active healthcare treatments. Smart wearable IMUs for human motion analysis have proven to provide valuable insights into a person’s psychological state, activities of daily living, identification/re-identification through gait signatures, etc. The existing literature, however, focuses on specificity, i.e., problem-specific deep models. This work presents a generic BiGRU-CNN deep model that can predict the emotional state of a person, classify the activities of daily living, and re-identify a person in a closed-loop scenario. For training and validation, we have employed publicly available and closed-access datasets. The data were collected with wearable inertial measurement units mounted non-invasively on the bodies of the subjects. Our findings demonstrate that the generic model achieves an impressive accuracy of 96.97% in classifying activities of daily living. Additionally, it re-identifies individuals in closed-loop scenarios with an accuracy of 93.71% and estimates emotional states with an accuracy of 78.20%. This study represents a significant effort towards developing a versatile deep-learning model for human motion analysis using wearable IMUs, demonstrating promising results across multiple applications.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
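Architecturally, a "generic" model of the kind this abstract describes can be read as a shared BiGRU-CNN backbone with task-specific heads for the three problems named (ADL classification, re-identification, emotion estimation). The sketch below is an assumption-laden illustration; the class counts and layer sizes are placeholders, not the paper's configuration.

import torch
import torch.nn as nn

class GenericBackbone(nn.Module):
    def __init__(self, in_channels=6, hidden=64):
        super().__init__()
        self.gru = nn.GRU(in_channels, hidden, batch_first=True,
                          bidirectional=True)
        self.conv = nn.Conv1d(2 * hidden, hidden, kernel_size=3, padding=1)

    def forward(self, x):                        # x: (batch, time, channels)
        h, _ = self.gru(x)                       # -> (batch, time, 2*hidden)
        h = torch.relu(self.conv(h.transpose(1, 2)))
        return h.mean(dim=2)                     # pooled shared embedding

backbone = GenericBackbone()
heads = {                                        # placeholder class counts
    "adl": nn.Linear(64, 10),
    "reid": nn.Linear(64, 30),
    "emotion": nn.Linear(64, 4),
}
x = torch.randn(8, 200, 6)                       # batch of 6D IMU windows
z = backbone(x)
print({task: head(z).shape for task, head in heads.items()})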
2023
Krüger, Björn; Weber, Christian; Greß, Hannah; Knaub, Jessica; Pukropski, Jan; Hütwohl, Daniela; Hahn, Kai; Grond, Martin; Jonas, Stephan; Surges, Rainer
Telekonsil zur Verbesserung der Epilepsiediagnostik Conference
Digitalisierung der Gesundheitsversorgung älterer Menschen, 2023.
@conference{krueger2023,
title = {Telekonsil zur Verbesserung der Epilepsiediagnostik},
author = {Björn Krüger and Christian Weber and Hannah Greß and Jessica Knaub and Jan Pukropski and Daniela Hütwohl and Kai Hahn and Martin Grond and Stephan Jonas and Rainer Surges},
year = {2023},
date = {2023-07-01},
urldate = {2023-07-01},
booktitle = {Digitalisierung der Gesundheitsversorgung älterer Menschen},
abstract = {Die erfolgreiche Diagnose von Epilepsien bedarf einer engen Zusammenarbeit von Hausärzt:innen, Kinderärzt:innen und neurologischen und epileptologischen Fachärzt:innen sowie den entsprechenden Fachkliniken. Zusätzlich zur Expertise der Ärzt:innen ist ein fortlaufender Austausch und die kontinuierliche Anreicherung von Fachwissen von Bedeutung. Neben der frühzeitigen Überweisung an Fachkliniken kann die gemeinsame, fallbegleitende und klinikübergreifende Aufnahme, der Austausch und die Pflege von Falldokumentationen ein wichtiger Baustein für die langfristige und erfolgreiche Begleitung der Patient:innen sein.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Riedlinger, Dorothee; Krüger, Björn; Sydow, Hanna; Deutschbein, Johannes; Fischer-Rosinský, Antje; Slagman, Anna; Möckel, Martin
Ein Frühwarnsystem für Stürze – Identifikation von sturzgefährdeten Patient:innen durch die kombinierte Analyse von Medikamenten-Verordnungsdaten und Bewegungsprofilen Conference
Digitalisierung der Gesundheitsversorgung älterer Menschen, 2023.
@conference{riedlinger2023,
title = {Ein Frühwarnsystem für Stürze – Identifikation von sturzgefährdeten Patient:innen durch die kombinierte Analyse von Medikamenten-Verordnungsdaten und Bewegungsprofilen},
author = {Dorothee Riedlinger and Björn Krüger and Hanna Sydow and Johannes Deutschbein and Antje Fischer-Rosinský and Anna Slagman and Martin Möckel},
year = {2023},
date = {2023-07-01},
booktitle = {Digitalisierung der Gesundheitsversorgung älterer Menschen},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Schulte-Rüther, Martin; Krüger, Björn; Lemken, Johannes; Greß, Hannah; Stroth, Sanna; Kamp-Becker, Inge; Poustka, Luise
Automatic Delineation and Classification of Head Movements Using 3D Accelerometer Data from Live Social Interaction Conference
2023.
@conference{schulteruether2023,
title = {Automatic Delineation and Classification of Head Movements Using 3D Accelerometer Data from Live Social Interaction},
author = {Martin Schulte-Rüther and Björn Krüger and Johannes Lemken and Hannah Greß and Sanna Stroth and Inge Kamp-Becker and Luise Poustka},
url = {https://cdn.ymaws.com/www.autism-insar.org/resource/resmgr/docs/annualmeeting/insar_2023_full_abstract_boo.pdf, INSAR 2023 Abstract Book (p. 1246)},
year = {2023},
date = {2023-05-06},
urldate = {2023-05-06},
abstract = {Individuals with autism spectrum disorder (ASD) often show reduced non-verbal communication during social interactive encounters, e.g. facial expressions, deictic and communicative gestures, eye gaze. This includes reduced usage and expression of gestures of the head, such as nodding, shaking the head, and head turning. Diagnostic criteria of ASD suggest that reduced non-verbal communicative behavior is an important symptom, but current diagnostic tools are restricted to subjective, clinical evaluation.
While many tools for automatic delineation and classification of facial expressions and gestures from video data are available, less work has been done with respect to the usage of accelerometer sensor data. Considering the current lack of objective, quantitative measures of non-verbal behavior during social interaction, an automated analysis pipeline for movement annotation from accelerometer data would be helpful for both clinical evaluation and research.
},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Lemken, Johannes; Krüger, Björn; Stroth, Sanna; Kamp-Becker, Inge; Gail, A.; Poustka, Luise; Schulte-Rüther, Martin
The relationship between eye gaze and engagement in dyadic conversations – a semi-automatic analysis using unobtrusive eye tracking glasses Conference
Wissenschaftliche Tagung Autismus-Spektrum, 2023.
@conference{lemken2023,
title = {The relationship between eye gaze and engagement in dyadic conversations – a semi-automatic analysis using unobtrusive eye tracking glasses},
author = {Johannes Lemken and Björn Krüger and Sanna Stroth and Inge Kamp-Becker and A. Gail and Luise Poustka and Martin Schulte-Rüther},
year = {2023},
date = {2023-03-03},
booktitle = {Wissenschaftliche Tagung Autismus-Spektrum},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}