How can we develop appropriate and natural forms of communication between humans and computers? In the future, computers will hardly be perceived as such, but will be seamlessly integrated into our living and working environments, e.g. in cyber-physical systems. How can people be effectively supported with their experiences, sensory organs, gestural means of expression and social needs?

Our work focuses on the systematic and fundamental research of different, preferably natural interaction modalities and their synergistic combination. We are researching gestural multi-touch interaction, digital pens in combination with digital paper, the efficient combination of touch and pen input, gestural interaction with hands, head and feet, gaze-supported interaction for remote displays, and tangibles. Our focus is on small and large interactive surfaces (mini-displays, smartphones, tablets, tabletops, high-resolution large wall displays) and their effective combination in multi-display environments.

We apply the developed techniques and principles in various application domains and projects with academic and industrial partners from Germany and abroad. We also investigate how modern user interfaces and interaction techniques can be used effectively in the fields of interactive information visualization, music informatics and semantic web.

Recent Publications

  • Hybrid User Interfaces: Past, Present, and Future of Complementary Cross-Device Interaction in Mixed Reality.

    Hubenschmid, S.; Satkowski, M.; Zagermann, J.; Méndez, J.; Elmqvist, N.; Feiner, S.; Feuchtner, T.; Grønbæk, J.; Lee, B.; Schmalstieg, D.; Dachselt, R.; Reiterer, H.

    @article{HZS2025huis,
       author = {Sebastian Hubenschmid and Marc Satkowski and Johannes Zagermann and Juli{\'a}n M{\'e}ndez and Niklas Elmqvist and Steven Feiner and Tiare Feuchtner and Jens Emil Gr{\o}nb{\ae}k and Benjamin Lee and Dieter Schmalstieg and Raimund Dachselt and Harald Reiterer},
       title = {Hybrid User Interfaces: Past, Present, and Future of Complementary Cross-Device Interaction in Mixed Reality},
       journal = {IEEE Transactions on Visualization and Computer Graphics},
       year = {2025},
       doi = {10.1109/TVCG.2026.3683941},
       internal-note = {journal inferred from DOI prefix 10.1109/TVCG; year taken from citation key -- DOI suggests a 2026 issue (likely early access), verify with publisher; volume/number/pages still missing}
    }

  • @inproceedings{krug2026mixedpresence,
       author = {Katja Krug and Wolfgang B{\"u}schel and Marc Satkowski and Stefan Gumhold and Raimund Dachselt},
       title = {Mixed Presence in Mixed Reality: Charting the Challenges and Opportunities},
       year = {2026},
       isbn = {979-8-4007-2278-3},
       numpages = {22},
       doi = {10.1145/3772318.3791508},
       keywords = {Mixed Presence, Mixed Reality, Collaboration},
       internal-note = {year and bare ISBN extracted from ACM composite string 979-8-4007-2278-3/2026/04; booktitle (proceedings title) missing -- verify via DOI}
    }

  • Beyond Links: Exploring Visual Representations of Multi-View Relations in Mixed Reality.

    Luo, W.; Rzayev, R.; Russig, B.; Visutarporn, S.; Satkowski, M.; Gumhold, S.; Dachselt, R.

    @inproceedings{luo2026beyondlinks,
       author = {Weizhou Luo and Rufat Rzayev and Benjamin Russig and Sivanon Visutarporn and Marc Satkowski and Stefan Gumhold and Raimund Dachselt},
       title = {Beyond Links: Exploring Visual Representations of Multi-View Relations in Mixed Reality},
       year = {2026},
       isbn = {979-8-4007-2278-3},
       numpages = {19},
       doi = {10.1145/3772318.3791398},
       keywords = {Spatiality, content organization, Multiple Views, Multi-view relations, Mixed Reality},
       internal-note = {year and bare ISBN extracted from ACM composite string 979-8-4007-2278-3/2026/04; booktitle (proceedings title) missing -- verify via DOI}
    }

  • @inproceedings{krug2026advancing,
       author = {Katja Krug},
       title = {Advancing Co-located and Distributed Multi-user Mixed Reality},
       year = {2026},
       isbn = {979-8-4007-2281-3},
       doi = {10.1145/3772363.3799196},
       keywords = {Mixed Reality, Multi-User, Social Interaction, Collaboration},
       internal-note = {year and bare ISBN extracted from ACM composite string 979-8-4007-2281-3/2026/04; booktitle and page info missing -- verify via DOI}
    }

  • Declarative Debugging for Datalog with Aggregation.

    Dachselt, R.; Gerlach, L.; Hanisch, P.; Ivliev, A.; Krötzsch, M.; Marx, M.; Méndez, J.

    @inproceedings{DGHIKMM2026,
       author = {Raimund Dachselt and Lukas Gerlach and Philipp Hanisch and Alex Ivliev and Markus Kr{\"o}tzsch and Maximilian Marx and Juli{\'a}n M{\'e}ndez},
       title = {Declarative Debugging for {Datalog} with Aggregation},
       series = {CEUR Workshop Proceedings},
       year = {2026},
       numpages = {8},
       internal-note = {year taken from citation key -- confirm; booktitle (workshop name) and CEUR volume number missing -- verify on ceur-ws.org}
    }