Publication
@inproceedings{Reipschlaeger2022,
author = {Patrick Reipschl\"{a}ger and Frederik Brudy and Raimund Dachselt and Justin Matejka and George Fitzmaurice and Fraser Anderson},
title = {AvatAR: An Immersive Analysis Environment for Human Motion Data Combining Interactive 3D Avatars and Trajectories},
booktitle = {Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems},
series = {CHI '22},
year = {2022},
month = {4},
isbn = {978-1-4503-9157-3},
location = {New Orleans, LA, USA},
numpages = {15},
doi = {10.1145/3491102.3517676},
publisher = {ACM},
address = {New York, NY, USA},
keywords = {Visualisation, augmented/mixed reality, human motion data, analysing space utilization, motion analysis, Immersive Analytics}
}
Abstract
Analysis of human motion data can reveal valuable insights about the utilization of space and interaction of humans with their environment. To support this, we present AvatAR, an immersive analysis environment for the in-situ visualization of human motion data that combines 3D trajectories, virtual avatars of people’s movement, and a detailed representation of their posture. Additionally, we describe how to embed visualizations directly into the environment, showing what a person looked at or what surfaces they touched, and how the avatar’s body parts can be used to access and manipulate those visualizations. AvatAR combines an AR HMD with a tablet to provide both mid-air and touch interaction for system control, as well as an additional overview to help users navigate the environment. We implemented a prototype and present several scenarios to show that AvatAR can enhance the analysis of human motion data by making data not only explorable, but experienceable.
Media
Our Video Figure presents the overall concept of AvatAR as well as the individual visualization techniques of which it is composed. Furthermore, we show how AvatAR can be applied to several use case scenarios.