2025
Lukas Schach, Christian Rack, Ryan P. McMahan, Marc Erich Latoschik,
Motion-Based User Identification across XR and Metaverse Applications by Deep Classification and Similarity Learning
.
2025.
[BibTeX]
[Download]
[BibSonomy]
@misc{schach2025motionbaseduseridentificationxr,
  title         = {Motion-Based User Identification across {XR} and {Metaverse} Applications by Deep Classification and Similarity Learning},
  author        = {Schach, Lukas and Rack, Christian and McMahan, Ryan P. and Latoschik, Marc Erich},
  year          = {2025},
  eprint        = {2509.08539},
  archiveprefix = {arXiv},
  url           = {https://arxiv.org/abs/2509.08539}
}
2024
Vivek Nair, Mark Roman Miller, Rui Wang, Brandon Huang, Christian Rack, Marc Erich Latoschik, James O'Brien,
Effect of Data Degradation on Motion Re-Identification
, In
2024 IEEE 25th International Symposium on a World of Wireless, Mobile and Multimedia Networks (WoWMoM)
.
2024.
[BibTeX]
[Download]
[BibSonomy]
[Doi]
@inproceedings{nair2024effect,
  author    = {Nair, Vivek and Miller, Mark Roman and Wang, Rui and Huang, Brandon and Rack, Christian and Latoschik, Marc Erich and O'Brien, James},
  title     = {Effect of Data Degradation on Motion Re-Identification},
  booktitle = {2024 IEEE 25th International Symposium on a World of Wireless, Mobile and Multimedia Networks (WoWMoM)},
  year      = {2024},
  doi       = {10.1109/WoWMoM60985.2024.00026},
  url       = {https://downloads.hci.informatik.uni-wuerzburg.de/2024-06-nair-obfuscation.pdf}
}
Mark R Miller, Vivek C Nair, Eugy Han, Cyan DeVeaux, Christian Rack, Rui Wang, Brandon Huang, Marc Erich Latoschik, James F O'Brien, Jeremy N Bailenson,
Effect of Duration and Delay on the Identifiability of VR Motion
, In
2024 IEEE 25th International Symposium on a World of Wireless, Mobile and Multimedia Networks (WoWMoM)
.
2024.
[BibTeX]
[Download]
[BibSonomy]
[Doi]
@inproceedings{miller2024effect,
  title     = {Effect of Duration and Delay on the Identifiability of {VR} Motion},
  author    = {Miller, Mark R and Nair, Vivek C and Han, Eugy and DeVeaux, Cyan and Rack, Christian and Wang, Rui and Huang, Brandon and Latoschik, Marc Erich and O'Brien, James F and Bailenson, Jeremy N},
  booktitle = {2024 IEEE 25th International Symposium on a World of Wireless, Mobile and Multimedia Networks (WoWMoM)},
  year      = {2024},
  url       = {https://downloads.hci.informatik.uni-wuerzburg.de/2024-06-Miller-effect-of-duration-and-delay.pdf},
  doi       = {10.1109/WoWMoM60985.2024.00023}
}
Vivek Nair, Christian Rack, Wenbo Guo, Rui Wang, Shuixian Li, Brandon Huang, Atticus Cull, James F. O'Brien, Marc Latoschik, Louis Rosenberg, Dawn Song,
Inferring Private Personal Attributes of Virtual Reality Users from Ecologically Valid Head and Hand Motion Data
, In
2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)
, pp. 477-484
.
2024.
[BibTeX]
[Download]
[BibSonomy]
[Doi]
@inproceedings{10536245,
  title     = {Inferring Private Personal Attributes of Virtual Reality Users from Ecologically Valid Head and Hand Motion Data},
  author    = {Nair, Vivek and Rack, Christian and Guo, Wenbo and Wang, Rui and Li, Shuixian and Huang, Brandon and Cull, Atticus and O'Brien, James F. and Latoschik, Marc and Rosenberg, Louis and Song, Dawn},
  booktitle = {2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
  year      = {2024},
  pages     = {477--484},
  doi       = {10.1109/VRW62533.2024.00094}
}
Christian Rack, Lukas Schach, Felix Achter, Yousof Shehada, Jinghuai Lin, Marc Erich Latoschik,
Motion Passwords
, In
Proceedings of the 30th ACM Symposium on Virtual Reality Software and Technology
(
19)
, pp. 1-11
.
New York, NY, USA
:
Association for Computing Machinery
, 2024.
[BibTeX]
[Abstract]
[Download]
[BibSonomy]
[Doi]
@inproceedings{rack2024motion,
  title     = {Motion Passwords},
  author    = {Rack, Christian and Schach, Lukas and Achter, Felix and Shehada, Yousof and Lin, Jinghuai and Latoschik, Marc Erich},
  booktitle = {Proceedings of the 30th ACM Symposium on Virtual Reality Software and Technology},
  year      = {2024},
  number    = {19},
  pages     = {1--11},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  doi       = {10.1145/3641825.3687711}
}
Abstract: This paper introduces “Motion Passwords”, a novel biometric authentication approach where virtual reality users verify their identity by physically writing a chosen word in the air with their hand controller. This method allows combining three layers of verification: knowledge-based password input, handwriting style analysis, and motion profile recognition. As a first step towards realizing this potential, we focus on verifying users based on their motion profiles. We conducted a data collection study with 48 participants, who performed over 3800 Motion Password signatures across two sessions. We assessed the effectiveness of feature-distance and similarity-learning methods for motion-based verification using the Motion Passwords as well as specific and uniform ball-throwing signatures used in previous works. In our results, the similarity-learning model was able to verify users with the same accuracy for both signature types. This demonstrates that Motion Passwords, even when applying only the motion-based verification layer, achieve reliability comparable to previous methods. This highlights the potential for Motion Passwords to become even more reliable with the addition of knowledge-based and handwriting style verification layers. Furthermore, we present a proof-of-concept Unity application demonstrating the registration and verification process with our pretrained similarity-learning model. We publish our code, the Motion Password dataset, the pretrained model, and our Unity prototype on https://github.com/cschell/MoPs
Christian Rack, Vivek Nair, Lukas Schach, Felix Foschum, Marcel Roth, Marc Erich Latoschik,
Navigating the Kinematic Maze: Analyzing, Standardizing and Unifying XR Motion Datasets
, In
2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)
.
2024.
[BibTeX]
[Abstract]
[Download]
[BibSonomy]
[Doi]
@inproceedings{noauthororeditor2024navigating,
  title     = {Navigating the Kinematic Maze: Analyzing, Standardizing and Unifying {XR} Motion Datasets},
  author    = {Rack, Christian and Nair, Vivek and Schach, Lukas and Foschum, Felix and Roth, Marcel and Latoschik, Marc Erich},
  booktitle = {2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
  year      = {2024},
  url       = {http://downloads.hci.informatik.uni-wuerzburg.de/2024-01-Rack-Navigating_the_Kinematic_Maze.pdf},
  doi       = {10.1109/VRW62533.2024.00098}
}
Abstract: This paper addresses the critical importance of standards and documentation in kinematic research, particularly within Extended Reality (XR) environments. We focus on the pivotal role of motion data, emphasizing the challenges posed by the current lack of standardized practices in XR user motion datasets. Our work involves a detailed analysis of 8 existing datasets, identifying gaps in documentation and essential specifications such as coordinate systems, rotation representations, and units of measurement. We highlight how these gaps can lead to misinterpretations and irreproducible results. Based on our findings, we propose a set of guidelines and best practices for creating and documenting motion datasets, aiming to improve their quality, usability, and reproducibility. We also created a web-based tool for visual inspection of motion recordings, further aiding in dataset evaluation and standardization. Furthermore, we introduce the XR Motion Dataset Catalogue, a collection of the analyzed datasets in a unified and aligned format. This initiative significantly streamlines access for researchers, allowing them to download partial or entire datasets with a single line of code and without the need for additional alignment efforts. Our contributions enhance dataset integrity and reliability in kinematic research, paving the way for more consistent and scientifically robust studies in this evolving field.
2023
Christian Rack, Lukas Schach, Marc Latoschik,
Motion Learning Toolbox – A Python library for preprocessing of XR motion tracking data for machine learning applications
.
2023.
[BibTeX]
[Abstract]
[Download]
[BibSonomy]
@misc{rack2023motionlearningtoolbox,
  title  = {Motion Learning Toolbox -- A {Python} Library for Preprocessing of {XR} Motion Tracking Data for Machine Learning Applications},
  author = {Rack, Christian and Schach, Lukas and Latoschik, Marc Erich},
  year   = {2023},
  url    = {https://github.com/cschell/Motion-Learning-Toolbox}
}
Abstract: The Motion Learning Toolbox is a Python library designed to facilitate the preprocessing of motion tracking data in extended reality (XR) setups. It's particularly useful for researchers and engineers wanting to use XR tracking data as input for machine learning models. Originally developed for academic research targeting the identification of XR users by their motions, this toolbox includes a variety of data encoding methods that enhance machine learning model performance.
Christian Rack, Konstantin Kobs, Tamara Fernando, Andreas Hotho, Marc Erich Latoschik,
Versatile User Identification in Extended Reality using Pretrained Similarity-Learning
, In
arXiv
, p. arXiv:2302.07517
.
2023.
[BibTeX]
[Abstract]
[Download]
[BibSonomy]
[Doi]
@misc{2023arXiv230207517S,
  title         = {Versatile User Identification in Extended Reality using Pretrained Similarity-Learning},
  author        = {Rack, Christian and Kobs, Konstantin and Fernando, Tamara and Hotho, Andreas and Latoschik, Marc Erich},
  year          = {2023},
  eprint        = {2302.07517},
  archiveprefix = {arXiv},
  url           = {https://arxiv.org/abs/2302.07517},
  doi           = {10.48550/arXiv.2302.07517}
}
Abstract: In this paper, we combine the strengths of distance-based and classification-based approaches for the task of identifying extended reality users by their movements. For this we explore an embedding-based model that leverages deep metric learning. We train the model on a dataset of users playing the VR game "Half-Life: Alyx" and conduct multiple experiments and analyses using a state of the art classification-based model as baseline. The results show that the embedding-based method 1) is able to identify new users from non-specific movements using only a few minutes of enrollment data, 2) can enroll new users within seconds, while retraining the baseline approach takes almost a day, 3) is more reliable than the baseline approach when only little enrollment data is available, 4) can be used to identify new users from another dataset recorded with different VR devices.
Altogether, our solution is a foundation for easily extensible XR user identification systems, applicable to a wide range of user motions. It also paves the way for production-ready models that could be used by XR practitioners without the requirements of expertise, hardware, or data for training deep learning models.
Christian Rack, Tamara Fernando, Murat Yalcin, Andreas Hotho, Marc Erich Latoschik,
Who Is Alyx? A new Behavioral Biometric Dataset for User Identification in XR
, In
Frontiers in Virtual Reality
David Swapp (Ed.),
, Vol.
4
.
2023.
[BibTeX]
[Abstract]
[Download]
[BibSonomy]
[Doi]
@article{rack2023behavioral,
  title   = {Who Is {Alyx}? A New Behavioral Biometric Dataset for User Identification in {XR}},
  author  = {Rack, Christian and Fernando, Tamara and Yalcin, Murat and Hotho, Andreas and Latoschik, Marc Erich},
  editor  = {Swapp, David},
  journal = {Frontiers in Virtual Reality},
  year    = {2023},
  volume  = {4},
  url     = {https://www.frontiersin.org/journals/virtual-reality/articles/10.3389/frvir.2023.1272234/full},
  doi     = {10.3389/frvir.2023.1272234}
}
Abstract: This article presents a new dataset containing motion and physiological data of users playing the game 'Half-Life: Alyx'. The dataset specifically targets behavioral and biometric identification of XR users. It includes motion and eye-tracking data captured by a HTC Vive Pro of 71 users playing the game on two separate days for 45 minutes. Additionally, we collected physiological data from 31 of these users. We provide benchmark performances for the task of motion-based identification of XR users with two prominent state-of-the-art deep learning architectures (GRU and CNN). After training on the first session of each user, the best model can identify the 71 users in the second session with a mean accuracy of 95% within 2 minutes. The dataset is freely available under https://github.com/cschell/who-is-alyx
2022
Christian Rack, Andreas Hotho, Marc Erich Latoschik,
Comparison of Data Encodings and Machine Learning Architectures for User Identification on Arbitrary Motion Sequences
, In
Proceedings of the IEEE International conference on artificial intelligence & Virtual Reality (IEEE AIVR)
.
IEEE
, 2022.
[BibTeX]
[Download]
[BibSonomy]
[Doi]
@inproceedings{schell2022comparison,
  title     = {Comparison of Data Encodings and Machine Learning Architectures for User Identification on Arbitrary Motion Sequences},
  author    = {Rack, Christian and Hotho, Andreas and Latoschik, Marc Erich},
  booktitle = {Proceedings of the {IEEE} International Conference on Artificial Intelligence \& Virtual Reality ({IEEE} {AIVR})},
  year      = {2022},
  publisher = {IEEE},
  url       = {https://downloads.hci.informatik.uni-wuerzburg.de/2022-ieeeaivr-schell-comparison-of-data-representations-and-machine-learning-architectures-for-user-identification-on-arbitrary-motion-sequences.pdf},
  doi       = {10.1109/AIVR56993.2022.00010}
}
Christian Rack, Fabian Sieper, Lukas Schach, Murat Yalcin, Marc E. Latoschik,
Dataset: Who is Alyx? (GitHub Repository)
.
2022.
[BibTeX]
[Abstract]
[Download]
[BibSonomy]
[Doi]
@dataset{who_is_alyx_2022,
  title  = {Dataset: Who is {Alyx}? ({GitHub} Repository)},
  author = {Rack, Christian and Sieper, Fabian and Schach, Lukas and Yalcin, Murat and Latoschik, Marc Erich},
  year   = {2022},
  url    = {https://github.com/cschell/who-is-alyx},
  doi    = {10.5281/zenodo.6472417}
}
Abstract: This dataset contains over 110 hours of motion, eye-tracking and physiological data from 71 players of the virtual reality game “Half-Life: Alyx”. Each player played the game on two separate days for about 45 minutes using a HTC Vive Pro.