
Sebastian Günther
Research Assistant at TU Darmstadt
Sebastian Günther is a PhD student working in the Telecooperation Lab at the Technical University of Darmstadt (TU Darmstadt) under the supervision of Prof. Max Mühlhäuser. He has a strong interest in Human-Computer Interaction research with a special focus on haptic systems as well as Virtual- and Augmented-Reality technologies. Prior to his work at the Telecooperation Lab, he received a Master's degree in Computer Science from TU Darmstadt and did a research internship at FXPAL.
@inproceedings{Guenther2022smooth,
  author      = {G{\"u}nther, Sebastian and Rasch, Julian and Sch{\"o}n, Dominik and M{\"u}ller, Florian and Schmitz, Martin and Riemann, Jan and Matviienko, Andrii and M{\"u}hlh{\"a}user, Max},
  title       = {Smooth as Steel Wool: Effects of Visual Stimuli on the Haptic Perception of Roughness in Virtual Reality},
  booktitle   = {Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems (CHI '22)},
  series      = {CHI '22},
  year        = {2022},
  month       = apr,
  publisher   = {ACM},
  address     = {New York, NY, USA},
  isbn        = {978-1-4503-9157-3},
  doi         = {10.1145/3491102.3517454},
  url         = {https://dl.acm.org/doi/10.1145/3491102.3517454},
  keywords    = {haptic, smooth, stimuli, stroke, visual, visualizations},
  abstract    = {Haptic Feedback is essential for lifelike Virtual Reality (VR) experiences. To provide a wide range of matching sensations of being touched or stroked, current approaches typically need large numbers of different physical textures. However, even advanced devices can only accommodate a limited number of textures to remain wearable. Therefore, a better understanding is necessary of how expectations elicited by different visualizations affect haptic perception, to achieve a balance between physical constraints and great variety of matching physical textures. In this work, we conducted an experiment (N=31) assessing how the perception of roughness is affected within VR. We designed a prototype for arm stroking and compared the effects of different visualizations on the perception of physical textures with distinct roughnesses. Additionally, we used the visualizations' real-world materials, no-haptics and vibrotactile feedback as baselines. As one result, we found that two levels of roughness can be sufficient to convey a realistic illusion.},
  file        = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2022/Guenther2022smooth.pdf},
  video       = {https://www.youtube.com/watch?v=9q6zZCJ9rLg},
  teaservideo = {https://www.youtube.com/watch?v=glEOP48qVCE},
}
@inproceedings{Matviienko2022bikear,
  author    = {Matviienko, Andrii and M{\"u}ller, Florian and Sch{\"o}n, Dominik and Seesemann, Paul and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title     = {BikeAR: Understanding Cyclists' Crossing Decision-Making at Uncontrolled Intersections using Augmented Reality},
  booktitle = {Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems (CHI '22)},
  series    = {CHI '22},
  year      = {2022},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  location  = {New Orleans, LA, USA},
  doi       = {10.1145/3491102.3517560},
  url       = {https://doi.org/10.1145/3491102.3517560},
  keywords  = {augmented reality, cyclist safety, crossing decision-making},
  abstract  = {Cycling has become increasingly popular as a means of transportation. However, cyclists remain a highly vulnerable group of road users. According to accident reports, one of the most dangerous situations for cyclists are uncontrolled intersections, where cars approach from both directions. To address this issue and assist cyclists in crossing decision-making at uncontrolled intersections, we designed two visualizations that: (1) highlight occluded cars through an X-ray vision and (2) depict the remaining time the intersection is safe to cross via a Countdown. To investigate the efficiency of these visualizations, we proposed an Augmented Reality simulation as a novel evaluation method, in which the above visualizations are represented as AR, and conducted a controlled experiment with 24 participants indoors. We found that the X-ray ensures a fast selection of shorter gaps between cars, while the Countdown facilitates a feeling of safety and provides a better intersection overview.},
}
@inproceedings{Schmitz2022squeezyfeely,
  author      = {Schmitz, Martin and G{\"u}nther, Sebastian and Sch{\"o}n, Dominik and M{\"u}ller, Florian},
  title       = {Squeezy-Feely: Investigating Lateral Thumb-Index Pinching as an Input Modality},
  booktitle   = {Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems (CHI '22)},
  series      = {CHI '22},
  year        = {2022},
  month       = apr,
  publisher   = {ACM},
  address     = {New York, NY, USA},
  isbn        = {978-1-4503-9157-3},
  doi         = {10.1145/3491102.3501981},
  url         = {https://doi.org/10.1145/3491102.3501981},
  keywords    = {Input, Pinching, Deformation, Mixed Reality, Thumb-to-finger, User Studies},
  abstract    = {From zooming on smartphones and mid-air gestures to deformable user interfaces, thumb-index pinching grips are used in many interaction techniques. However, there is still a lack of systematic understanding of how the accuracy and efficiency of such grips are affected by various factors such as counterforce, grip span, and grip direction. Therefore, in this paper, we contribute an evaluation (N = 18) of thumb-index pinching performance in a visual targeting task using scales up to 75 items. As part of our findings, we conclude that the pinching interaction between the thumb and index finger is a promising modality also for one-dimensional input on higher scales. Furthermore, we discuss and outline implications for future user interfaces that benefit from pinching as an additional and complementary interaction modality.},
  file        = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2022/schmitz2022squeezyfeely.pdf},
  teaservideo = {https://www.youtube.com/watch?v=DW23J3CalFw},
  award       = {Best Paper},
  note        = {Best Paper Award},
}
@inproceedings{Elsayed2021cameraready,
  author    = {Elsayed, Hesham and Hoffmann, Philipp and G{\"u}nther, Sebastian and Schmitz, Martin and Weigel, Martin and M{\"u}hlh{\"a}user, Max and M{\"u}ller, Florian},
  title     = {CameraReady: Assessing the Influence of Display Types and Visualizations on Posture Guidance},
  booktitle = {Designing Interactive Systems Conference 2021},
  series    = {DIS '21},
  year      = {2021},
  month     = jun,
  pages     = {1046--1055},
  publisher = {ACM},
  address   = {New York, NY, USA},
  isbn      = {9781450384766},
  doi       = {10.1145/3461778.3462026},
  url       = {https://dl.acm.org/doi/10.1145/3461778.3462026},
  abstract  = {Computer-supported posture guidance is used in sports, dance training, expression of art with movements, and learning gestures for interaction. At present, the influence of display types and visualizations have not been investigated in the literature. These factors are important as they directly impact perception and cognitive load, and hence influence the performance of participants. In this paper, we conducted a controlled experiment with 20 participants to compare the use of five display types with different screen sizes: smartphones, tablets, desktop monitors, TVs, and large displays. On each device, we compared three common visualizations for posture guidance: skeletons, silhouettes, and 3d body models. To conduct our assessment, we developed a mobile and cross-platform system that only requires a single camera. Our results show that compared to a smartphone display, larger displays show a lower error. Regarding the choice of visualization, participants rated 3D body models as significantly more usable in comparison to a skeleton visualization.},
}
@inproceedings{Guenther2021actuboard,
  author    = {G{\"u}nther, Sebastian and M{\"u}ller, Florian and H{\"u}bner, Felix and M{\"u}hlh{\"a}user, Max and Matviienko, Andrii},
  title     = {ActuBoard: An Open Rapid Prototyping Platform to Integrate Hardware Actuators in Remote Applications},
  booktitle = {Companion of the 2021 ACM SIGCHI Symposium on Engineering Interactive Computing Systems},
  series    = {EICS '21},
  year      = {2021},
  pages     = {70--76},
  numpages  = {7},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  location  = {Virtual Event, Netherlands},
  isbn      = {9781450384490},
  doi       = {10.1145/3459926.3464757},
  url       = {https://doi.org/10.1145/3459926.3464757},
  keywords  = {hardware, tinkering, actuators, haptics, rapid prototyping, open source, virtual reality},
  abstract  = {Prototyping is an essential step in developing tangible experiences and novel devices, ranging from haptic feedback to wearables. However, prototyping of actuated devices nowadays often requires repetitive and time-consuming steps, such as wiring, soldering, and programming basic communication, before HCI researchers and designers can focus on their primary interest: designing interaction. In this paper, we present ActuBoard, a prototyping platform to support 1) quick assembly, 2) less preparation work, and 3) the inclusion of non-tech-savvy users. With ActuBoard, users are not required to create complex circuitry, write a single line of firmware, or implementing communication protocols. Acknowledging existing systems, our platform combines the flexibility of low-level microcontrollers and ease-of-use of abstracted tinker platforms to control actuators from separate applications. As further contribution, we highlight the technical specifications and published the ActuBoard platform as Open Source.},
}
@article{elsayed2020vibromap,
  author     = {Elsayed, Hesham and Weigel, Martin and M{\"u}ller, Florian and Schmitz, Martin and Marky, Karola and G{\"u}nther, Sebastian and Riemann, Jan and M{\"u}hlh{\"a}user, Max},
  title      = {VibroMap: Understanding the Spacing of Vibrotactile Actuators across the Body},
  journal    = {Proc. ACM Interact. Mob. Wearable Ubiquitous Technol.},
  year       = {2020},
  month      = dec,
  issue_date = {December 2020},
  volume     = {4},
  number     = {4},
  articleno  = {125},
  numpages   = {16},
  publisher  = {Association for Computing Machinery},
  address    = {New York, NY, USA},
  doi        = {10.1145/3432189},
  url        = {https://doi.org/10.1145/3432189},
  keywords   = {haptic output, design implications, wearable computing, vibrotactile interfaces, phantom sensation, actuator spacing, ERM vibration motors},
  abstract   = {In spite of the great potential of on-body vibrotactile displays for a variety of applications, research lacks an understanding of the spacing between vibrotactile actuators. Through two experiments, we systematically investigate vibrotactile perception on the wrist, forearm, upper arm, back, torso, thigh, and leg, each in transverse and longitudinal body orientation. In the first experiment, we address the maximum distance between vibration motors that still preserves the ability to generate phantom sensations. In the second experiment, we investigate the perceptual accuracy of localizing vibrations in order to establish the minimum distance between vibration motors. Based on the results, we derive VibroMap, a spatial map of the functional range of inter-motor distances across the body. VibroMap supports hardware and interaction designers with design guidelines for constructing body-worn vibrotactile displays.},
}
@inproceedings{guenther2020therminator,
  author      = {G{\"u}nther, Sebastian and M{\"u}ller, Florian and Sch{\"o}n, Dominik and Elmoghazy, Omar and Schmitz, Martin and M{\"u}hlh{\"a}user, Max},
  title       = {Therminator: Understanding the Interdependency of Visual and On-Body Thermal Feedback in Virtual Reality},
  booktitle   = {Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems},
  series      = {CHI '20},
  year        = {2020},
  isbn        = {978-1-4503-6708-0},
  location    = {Honolulu, HI, USA},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3313831.3376195},
  url         = {https://doi.org/10.1145/3313831.3376195},
  video       = {https://www.youtube.com/watch?v=q5lkmqAua78},
  teaservideo = {https://youtu.be/w9FnG1eoWD8},
  keywords    = {Haptics, Temperature, Thermal Feedback, Virtual Reality},
  abstract    = {Recent advances have made Virtual Reality (VR) more realistic than ever before. This improved realism is attributed to today's ability to increasingly appeal to human sensations, such as visual, auditory or tactile. While research also examines temperature sensation as an important aspect, the interdependency of visual and thermal perception in VR is still underexplored. In this paper, we propose Therminator, a thermal display concept that provides warm and cold on-body feedback in VR through heat conduction of flowing liquids with different temperatures. Further, we systematically evaluate the interdependency of different visual and thermal stimuli on the temperature perception of arm and abdomen with 25 participants. As part of the results, we found varying temperature perception depending on the stimuli, as well as increasing involvement of users during conditions with matching stimuli.},
}
@inproceedings{guenther2020pneumovolley,
  author      = {G{\"u}nther, Sebastian and Sch{\"o}n, Dominik and M{\"u}ller, Florian and M{\"u}hlh{\"a}user, Max and Schmitz, Martin},
  title       = {PneumoVolley: Pressure-based Haptic Feedback on the Head through Pneumatic Actuation},
  booktitle   = {Proceedings of the 2020 CHI Conference Extended Abstracts on Human Factors in Computing Systems},
  series      = {CHI EA '20},
  year        = {2020},
  isbn        = {978-1-4503-6708-0},
  location    = {Honolulu, HI, USA},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3334480.3382916},
  url         = {http://dx.doi.org/10.1145/3334480.3382916},
  file        = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2020/guenther2020pneumovolley.pdf},
  video       = {https://www.youtube.com/watch?v=ZKnV8HrUx9M},
  teaservideo = {https://www.youtube.com/watch?v=-SlrCqF-5m4},
  keywords    = {Haptics, Pressure, Volleyball, Virtual Reality, Blobbyvolley},
  abstract    = {Haptic Feedback brings immersion and presence in Virtual Reality (VR) to the next level. While research proposes the usage of various tactile sensations, such as vibration or ultrasound approaches, the potential applicability of pressure feedback on the head is still underexplored. In this paper, we contribute concepts and design considerations for pressure-based feedback on the head through pneumatic actuation. As a proof-of-concept implementing our pressure-based haptics, we further present PneumoVolley: a VR experience similar to the classic Volleyball game but played with the head. In an exploratory user study with 9 participants, we evaluated our concepts and identified a significantly increased involvement compared to a no-haptics baseline along with high realism and enjoyment ratings using pressure-based feedback on the head in VR.},
}
@inproceedings{koutny2020toolsupport,
  author    = {Koutny, Reinhard and G{\"u}nther, Sebastian and Dhingra, Naina and Kunz, Andreas and Miesenberger, Klaus and M{\"u}hlh{\"a}user, Max},
  editor    = {Miesenberger, Klaus and Manduchi, Roberto and Covarrubias Rodriguez, Mario and Pe{\v{n}}{\'a}z, Petr},
  title     = {Accessible Multimodal Tool Support for Brainstorming Meetings},
  booktitle = {Computers Helping People with Special Needs},
  series    = {ICCHP '20},
  year      = {2020},
  publisher = {Springer International Publishing},
  address   = {Cham},
  pages     = {11--20},
  isbn      = {978-3-030-58805-2},
  doi       = {10.1007/978-3-030-58805-2_2},
  url       = {https://doi.org/10.1007/978-3-030-58805-2_2},
  abstract  = {In recent years, assistive technology and digital accessibility for blind and visually impaired people (BVIP) has been significantly improved. Yet, group discussions, especially in a business context, are still challenging as non-verbal communication (NVC) is often depicted on digital whiteboards, including deictic gestures paired with visual artifacts. However, as NVC heavily relies on the visual perception, which represents a large amount of detail, an adaptive approach is required that identifies the most relevant information for BVIP. Additionally, visual artifacts usually rely on spatial properties such as position, orientation, and dimensions to convey essential information such as hierarchy, cohesion, and importance that is often not accessible to the BVIP. In this paper, we investigate the requirements of BVIP during brainstorming sessions and, based on our findings, provide an accessible multimodal tool that uses non-verbal and spatial cues as an additional layer of information. Further, we contribute by presenting a set of input and output modalities that encode and decode information with respect to the individual demands of BVIP and the requirements of different use cases.},
}
@article{mueller2020aroundbodyinteraction,
  author  = {M{\"u}ller, Florian and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title   = {Around-Body Interaction: Interacting While on the Go},
  journal = {IEEE Pervasive Computing},
  year    = {2020},
  volume  = {19},
  number  = {2},
  pages   = {74--78},
  doi     = {10.1109/MPRV.2020.2977850},
  url     = {https://doi.org/10.1109/MPRV.2020.2977850},
}
@inproceedings{mueller2020walktheline,
  author      = {M{\"u}ller, Florian and Schmitz, Martin and Schmitt, Daniel and G{\"u}nther, Sebastian and Funk, Markus and M{\"u}hlh{\"a}user, Max},
  title       = {Walk The Line: Leveraging Lateral Shifts of the Walking Path as an Input Modality for Head-Mounted Displays},
  booktitle   = {Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems},
  series      = {CHI '20},
  year        = {2020},
  isbn        = {978-1-4503-6708-0},
  location    = {Honolulu, HI, USA},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3313831.3376852},
  url         = {https://doi.org/10.1145/3313831.3376852},
  file        = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2020/mueller2020walktheline.pdf},
  video       = {https://youtu.be/ylAlzFqWx7g},
  teaservideo = {https://youtu.be/6-XrF6J9cTc},
  keywords    = {Augmented Reality, Head-Mounted Display, Input, Walking},
  abstract    = {Recent technological advances have made head-mounted displays (HMDs) smaller and untethered, fostering the vision of ubiquitous interaction in a digitally augmented physical world. Consequently, a major part of the interaction with such devices will happen on the go, calling for interaction techniques that allow users to interact while walking. In this paper, we explore lateral shifts of the walking path as a hands-free input modality. The available input options are visualized as lanes on the ground parallel to the user's walking path. Users can select options by shifting the walking path sideways to the respective lane. We contribute the results of a controlled experiment with 18 participants, confirming the viability of our approach for fast, accurate, and joyful interactions. Further, based on the findings of the controlled experiment, we present three example applications.},
}
@inproceedings{dhingra2020pointing,
  author    = {Dhingra, Naina and Koutny, Reinhard and G{\"u}nther, Sebastian and Miesenberger, Klaus and M{\"u}hlh{\"a}user, Max and Kunz, Andreas},
  editor    = {Miesenberger, Klaus and Manduchi, Roberto and Covarrubias Rodriguez, Mario and Pe{\v{n}}{\'a}z, Petr},
  title     = {Pointing Gesture Based User Interaction of Tool Supported Brainstorming Meetings},
  booktitle = {Computers Helping People with Special Needs},
  series    = {ICCHP '20},
  year      = {2020},
  publisher = {Springer International Publishing},
  address   = {Cham},
  pages     = {21--29},
  isbn      = {978-3-030-58805-2},
  doi       = {10.1007/978-3-030-58805-2_3},
  url       = {https://doi.org/10.1007/978-3-030-58805-2_3},
  abstract  = {This paper presents a brainstorming tool combined with pointing gestures to improve the brainstorming meeting experience for blind and visually impaired people (BVIP). In brainstorming meetings, BVIPs are not able to participate in the conversation as well as sighted users because of the unavailability of supporting tools for understanding the explicit and implicit meaning of the non-verbal communication (NVC). Therefore, the proposed system assists BVIP in interpreting pointing gestures which play an important role in non-verbal communication. Our system will help BVIP to access the contents of a Metaplan card, a team member in the brainstorming meeting is referring to by pointing. The prototype of our system shows that targets on the screen a user is pointing at can be detected with 80{\%} accuracy.},
}
@inproceedings{funk2019assessing,
  author      = {Funk, Markus and M{\"u}ller, Florian and Fendrich, Marco and Shene, Megan and Kolvenbach, Moritz and Dobbertin, Niclas and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title       = {Assessing the Accuracy of Point \& Teleport Locomotion with Orientation Indication for Virtual Reality using Curved Trajectories},
  booktitle   = {Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems},
  year        = {2019},
  doi         = {10.1145/3290605.3300377},
  video       = {https://www.youtube.com/watch?v=uXctClcQu_g},
  teaservideo = {https://www.youtube.com/watch?v=klu82WxeBlA},
}
@inproceedings{guenther2019mapvi,
  author    = {G{\"u}nther, Sebastian and Koutny, Reinhard and Dhingra, Naina and Funk, Markus and Hirt, Christian and Miesenberger, Klaus and M{\"u}hlh{\"a}user, Max and Kunz, Andreas},
  title     = {MAPVI: Meeting Accessibility for Persons with Visual Impairments},
  booktitle = {Proceedings of the 12th PErvasive Technologies Related to Assistive Environments Conference},
  series    = {PETRA '19},
  year      = {2019},
  publisher = {ACM},
  address   = {New York, NY, USA},
  doi       = {10.1145/3316782.3322747},
  acmid     = {3322747},
}
@inproceedings{guenther2019slappyfications,
  author    = {G{\"u}nther, Sebastian and M{\"u}ller, Florian and Funk, Markus and M{\"u}hlh{\"a}user, Max},
  title     = {Slappyfications: Towards Ubiquitous Physical and Embodied Notifications},
  booktitle = {Proceedings of the 2019 CHI Conference Extended Abstracts on Human Factors in Computing Systems},
  series    = {CHI EA '19},
  year      = {2019},
  doi       = {10.1145/3290607.3311780},
  video     = {https://www.youtube.com/watch?v=jZF0hNZRfY8},
}
@inproceedings{guenther2019pneumact,
  author      = {G{\"u}nther, Sebastian and Makhija, Mohit and M{\"u}ller, Florian and Sch{\"o}n, Dominik and M{\"u}hlh{\"a}user, Max and Funk, Markus},
  title       = {PneumAct: Pneumatic Kinesthetic Actuation of Body Joints in Virtual Reality Environments},
  booktitle   = {Proceedings of the ACM Conference on Designing Interactive Systems, DIS '19},
  series      = {DIS '19},
  year        = {2019},
  doi         = {10.1145/3322276.3322302},
  keywords    = {Compressed Air, Force Feedback, Kinesthetic, Pneumatic, Haptics, Virtual Reality},
  teaservideo = {https://youtu.be/4lRWxzs4Rgs},
  abstract    = {Virtual Reality Environments (VRE) create an immersive user experience through visual, aural, and haptic sensations. However, the latter is often limited to vibrotactile sensations that are not able to actively provide kinesthetic motion actuation. Further, such sensations do not cover natural representations of physical forces, for example, when lifting a weight. We present PneumAct, a jacket to enable pneumatically actuated kinesthetic movements of arm joints in VRE. It integrates two types of actuators inflated through compressed air: a Contraction Actuator and an Extension Actuator. We evaluate our PneumAct jacket through two user studies with a total of 32 participants: First, we perform a technical evaluation measuring the contraction and extension angles of different inflation patterns and inflation durations. Second, we evaluate PneumAct in three VRE scenarios comparing our system to traditional controller-based vibrotactile and a baseline without haptic feedback.},
}
@inproceedings{mueller2019mind,
  author      = {M{\"u}ller, Florian and McManus, Joshua and G{\"u}nther, Sebastian and Schmitz, Martin and M{\"u}hlh{\"a}user, Max and Funk, Markus},
  title       = {Mind the Tap: Assessing Foot-Taps for Interacting with Head-Mounted Displays},
  booktitle   = {Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems},
  year        = {2019},
  doi         = {10.1145/3290605.3300707},
  video       = {https://www.youtube.com/watch?v=D5hTVIEb7iA&t},
  teaservideo = {https://www.youtube.com/watch?v=RhabMsP0X14},
  note        = {Honorable Mention Award},
}
@inproceedings{mueller2019smartobjects,
  author    = {M{\"u}ller, Florian and Schnelle-Walka, Dirk and G{\"u}nther, Sebastian and Marky, Karola and Funk, Markus and M{\"u}hlh{\"a}user, Max},
  title     = {Proceedings of the 7th Workshop on Interacting with Smart Objects},
  booktitle = {Workshop Co-located with the 11th ACM SIGCHI Symposium on Engineering Interactive Computing Systems (EICS '19)},
  year      = {2019},
}
@inproceedings{marky2019teachyverse,
  author    = {Marky, Karola and M\"{u}ller, Florian and Funk, Markus and Geiss, Alex and G\"{u}nther, Sebastian and Schmitz, Martin and Riemann, Jan and M\"{u}hlh\"{a}user, Max},
  title     = {Teachyverse: Collaborative E-Learning in Virtual Reality Lecture Halls},
  booktitle = {Proceedings of Mensch Und Computer 2019},
  series    = {MuC'19},
  year      = {2019},
  isbn      = {978-1-4503-7198-8},
  location  = {Hamburg, Germany},
  pages     = {831--834},
  numpages  = {4},
  publisher = {ACM},
  address   = {New York, NY, USA},
  doi       = {10.1145/3340764.3344917},
  url       = {http://doi.acm.org/10.1145/3340764.3344917},
  acmid     = {3344917},
  keywords  = {E-Learning, Lecture halls, Virtual Lecture, Virtual Reality},
}
@inproceedings{marky2019music,
  author    = {Marky, Karola and Weiss, Andreas and Gedeon, Julien and G{\"u}nther, Sebastian},
  title     = {Mastering Music Instruments through Technology in Solo Learning Sessions},
  booktitle = {Proceedings of the 7th Workshop on Interacting with Smart Objects (SmartObjects '19) in conjunction with EICS '19},
  year      = {2019},
  url       = {http://tubiblio.ulb.tu-darmstadt.de/113376/},
}
@inproceedings{willich2019vrchairracer,
  author      = {von Willich, Julius and Sch{\"o}n, Dominik and G{\"u}nther, Sebastian and M{\"u}ller, Florian and M{\"u}hlh{\"a}user, Max and Funk, Markus},
  title       = {VRChairRacer: Using an Office Chair Backrest as a Locomotion Technique for VR Racing Games},
  booktitle   = {Proceedings of the 2019 CHI Conference Extended Abstracts on Human Factors in Computing Systems},
  series      = {CHI EA '19},
  year        = {2019},
  doi         = {10.1145/3290607.3313254},
  video       = {https://www.youtube.com/watch?v=v906aGntoKY},
  teaservideo = {https://www.youtube.com/watch?v=8ukVghWoTlE},
}
@inproceedings{guenther2018assistive,
  author    = {G\"{u}nther, Sebastian and Kratz, Sven and Avrahami, Daniel and M\"{u}hlh\"{a}user, Max},
  title     = {Exploring Audio, Visual, and Tactile Cues for Synchronous Remote Assistance},
  booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference},
  series    = {PETRA '18},
  year      = {2018},
  isbn      = {978-1-4503-6390-7},
  location  = {Corfu, Greece},
  pages     = {339--344},
  numpages  = {6},
  publisher = {ACM},
  address   = {New York, NY, USA},
  doi       = {10.1145/3197768.3201568},
  url       = {http://doi.acm.org/10.1145/3197768.3201568},
  acmid     = {3201568},
  keywords  = {3D-Space, Assistive Technology, Audio Cues, Augmented Reality, Haptics, Navigation, Remote Collaboration, Spatial Guidance, Vibrotactile Feedback},
}
@inproceedings{guenther2018tactileglove,
  author    = {G\"{u}nther, Sebastian and M\"{u}ller, Florian and Funk, Markus and Kirchner, Jan and Dezfuli, Niloofar and M\"{u}hlh\"{a}user, Max},
  title     = {TactileGlove: Assistive Spatial Guidance in 3D Space Through Vibrotactile Navigation},
  booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference},
  series    = {PETRA '18},
  year      = {2018},
  isbn      = {978-1-4503-6390-7},
  location  = {Corfu, Greece},
  pages     = {273--280},
  numpages  = {8},
  publisher = {ACM},
  address   = {New York, NY, USA},
  doi       = {10.1145/3197768.3197785},
  url       = {http://doi.acm.org/10.1145/3197768.3197785},
  acmid     = {3197785},
  keywords  = {3D-Space, Assistive Technology, Haptics, Navigation, Pull Push Metaphors, Spatial Guidance, Vibrotactile},
}
@inproceedings{guenther2018checkmate,
  author      = {G\"{u}nther, Sebastian and M\"{u}ller, Florian and Schmitz, Martin and Riemann, Jan and Dezfuli, Niloofar and Funk, Markus and Sch\"{o}n, Dominik and M\"{u}hlh\"{a}user, Max},
  title       = {CheckMate: Exploring a Tangible Augmented Reality Interface for Remote Interaction},
  booktitle   = {Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems},
  series      = {CHI EA '18},
  year        = {2018},
  isbn        = {978-1-4503-5621-3},
  location    = {Montreal QC, Canada},
  pages       = {LBW570:1--LBW570:6},
  articleno   = {LBW570},
  numpages    = {6},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3170427.3188647},
  url         = {http://doi.acm.org/10.1145/3170427.3188647},
  acmid       = {3188647},
  keywords    = {3d fabrication, augmented reality, chess, mixed reality, remote collaboration, tabletops, tangibles},
  teaservideo = {https://www.youtube.com/watch?v=Geyr95Nl8mc},
}
@inproceedings{mueller2018pucsbi,
  author      = {M\"{u}ller, Florian and Schmitz, Martin and Funk, Markus and G\"{u}nther, Sebastian and Dezfuli, Niloofar and M\"{u}hlh\"{a}user, Max},
  title       = {Personalized User-Carried Single Button Interfaces As Shortcuts for Interacting with Smart Devices},
  booktitle   = {Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems},
  series      = {CHI EA '18},
  year        = {2018},
  isbn        = {978-1-4503-5621-3},
  location    = {Montreal QC, Canada},
  pages       = {LBW602:1--LBW602:6},
  articleno   = {LBW602},
  numpages    = {6},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3170427.3188661},
  url         = {http://doi.acm.org/10.1145/3170427.3188661},
  acmid       = {3188661},
  keywords    = {human factors, interaction, smart devices},
  teaservideo = {https://www.youtube.com/watch?v=Z5wicorfmxU},
}
@proceedings{mueller2018smartobjectsproceedings,
  editor    = {M{\"u}ller, Florian and Schnelle-Walka, Dirk and G{\"u}nther, Sebastian and Funk, Markus},
  title     = {Proceedings of the 6th Workshop on Interacting with Smart Objects (SmartObjects)},
  booktitle = {CEUR Workshop Proceedings (CHI '18 Workshop)},
  series    = {CEUR Workshop Proceedings},
  number    = {2082},
  year      = {2018},
  issn      = {1613-0073},
  address   = {Montreal, Canada},
  url       = {http://tubiblio.ulb.tu-darmstadt.de/107555/},
}
@inproceedings{mueller2018smartobjects,
  author    = {M{\"u}ller, Florian and Schnelle-Walka, Dirk and Grosse-Puppendahl, Tobias and G{\"u}nther, Sebastian and Funk, Markus and Luyten, Kris and Brdiczka, Oliver and Dezfuli, Niloofar and M{\"u}hlh{\"a}user, Max},
  title     = {SmartObjects: Sixth Workshop on Interacting with Smart Objects},
  booktitle = {Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems},
  series    = {CHI EA '18},
  year      = {2018},
  isbn      = {978-1-4503-5621-3},
  location  = {Montreal QC, Canada},
  pages     = {W20:1--W20:6},
  articleno = {W20},
  numpages  = {6},
  publisher = {ACM},
  address   = {New York, NY, USA},
  doi       = {10.1145/3170427.3170606},
  url       = {http://doi.acm.org/10.1145/3170427.3170606},
  acmid     = {3170606},
  keywords  = {context-awareness, embodied interaction, enabling technologies, hci, multimodal and adapter interaction, novel interaction, smart objects, tangible interaction},
}
@inproceedings{murauer2018language,
  author    = {Murauer, Nela and M\"{u}ller, Florian and G\"{u}nther, Sebastian and Sch\"{o}n, Dominik and Pflanz, Nerina and Funk, Markus},
  title     = {An Analysis of Language Impact on Augmented Reality Order Picking Training},
  booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference},
  series    = {PETRA '18},
  year      = {2018},
  isbn      = {978-1-4503-6390-7},
  location  = {Corfu, Greece},
  pages     = {351--357},
  numpages  = {7},
  publisher = {ACM},
  address   = {New York, NY, USA},
  doi       = {10.1145/3197768.3201570},
  url       = {http://doi.acm.org/10.1145/3197768.3201570},
  acmid     = {3201570},
  keywords  = {Assistive Systems, Augmented Reality, Order Picking},
}
@inproceedings{schmitz2018offline,
  author      = {Schmitz, Martin and Herbers, Martin and Dezfuli, Niloofar and G\"{u}nther, Sebastian and M\"{u}hlh\"{a}user, Max},
  title       = {Off-Line Sensing: Memorizing Interactions in Passive 3D-Printed Objects},
  booktitle   = {Proceedings of the 2018 CHI Conference on Human Factors in Computing Systems},
  series      = {CHI '18},
  year        = {2018},
  isbn        = {978-1-4503-5620-6},
  location    = {Montreal QC, Canada},
  pages       = {182:1--182:8},
  articleno   = {182},
  numpages    = {8},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3173574.3173756},
  url         = {http://doi.acm.org/10.1145/3173574.3173756},
  acmid       = {3173756},
  keywords    = {3d printing, capacitive sensing, digital fabrication, input, mechanism, metamaterial, sensors},
  teaservideo = {https://www.youtube.com/watch?v=19dDaeBEnPM},
  note        = {Honorable Mention Award},
}
@inproceedings{guenther2017byo,
  author    = {G{\"u}nther, Sebastian and Schmitz, Martin and M{\"u}ller, Florian and Riemann, Jan and M{\"u}hlh{\"a}user, Max},
  title     = {{BYO*}: Utilizing {3D} Printed Tangible Tools for Interaction on Interactive Surfaces},
  booktitle = {Proceedings of the 2017 ACM Workshop on Interacting with Smart Objects},
  series    = {SmartObject '17},
  year      = {2017},
  isbn      = {978-1-4503-4902-4},
  location  = {Limassol, Cyprus},
  pages     = {21--26},
  numpages  = {6},
  url       = {http://doi.acm.org/10.1145/3038450.3038456},
  doi       = {10.1145/3038450.3038456},
  acmid     = {3038456},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {3D printing, capacitive sensing, data manipulation, data sharing, data visualization, digital fabrication, input sensing, rapid prototyping},
}
@inproceedings{mueller2017cloudbits,
  author    = {M{\"u}ller, Florian and G{\"u}nther, Sebastian and Nejad, Azita Hosseini and Dezfuli, Niloofar and Khalilbeigi, Mohammadreza and M{\"u}hlh{\"a}user, Max},
  title     = {Cloudbits: Supporting Conversations Through Augmented Zero-query Search Visualization},
  booktitle = {Proceedings of the 5th Symposium on Spatial User Interaction},
  series    = {SUI '17},
  year      = {2017},
  isbn      = {978-1-4503-5486-8},
  location  = {Brighton, United Kingdom},
  pages     = {30--38},
  numpages  = {9},
  url       = {http://doi.acm.org/10.1145/3131277.3132173},
  doi       = {10.1145/3131277.3132173},
  acmid     = {3132173},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {HMD, design, human factors},
}
@inproceedings{meurisch2017,
  author    = {Meurisch, Christian and Naeem, Usman and Scholl, Philipp M. and Azam, Muhammad Awais and G{\"u}nther, Sebastian and Baumann, Paul and R{\'e}hman, Shafiq ur and M{\"u}hlh{\"a}user, Max},
  title     = {{SmartGuidance'17}: 2nd Workshop on Intelligent Personal Support of Human Behavior},
  booktitle = {Proceedings of the 2017 ACM International Joint Conference on Pervasive and Ubiquitous Computing and Proceedings of the 2017 ACM International Symposium on Wearable Computers},
  series    = {UbiComp '17},
  year      = {2017},
  isbn      = {978-1-4503-5190-4},
  location  = {Maui, Hawaii},
  pages     = {623--626},
  numpages  = {4},
  url       = {http://doi.acm.org/10.1145/3123024.3124457},
  doi       = {10.1145/3123024.3124457},
  acmid     = {3124457},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {anticipatory mobile computing, mobile sensing, personal assistance, pervasive environment, ubiquitous devices},
}
@inproceedings{riemann2017hybrid,
  author    = {Riemann, Jan and M{\"u}ller, Florian and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title     = {An Evaluation of Hybrid Stacking on Interactive Tabletops},
  booktitle = {Proceedings of the 2017 ACM Workshop on Interacting with Smart Objects},
  series    = {SmartObject '17},
  year      = {2017},
  isbn      = {978-1-4503-4902-4},
  location  = {Limassol, Cyprus},
  pages     = {13--20},
  numpages  = {8},
  url       = {http://doi.acm.org/10.1145/3038450.3038451},
  doi       = {10.1145/3038450.3038451},
  acmid     = {3038451},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {hybrid physical-digital interaction, interactive tabletop displays, multitouch, peripheral displays, piling, stacking},
}
@mastersthesis{guenther2016proximity,
  title  = {Proximity-based Interaction for Smartwatches},
  author = {G{\"u}nther, Sebastian},
  school = {Technische Universit{\"a}t Darmstadt},
  year   = {2016},
}
@inproceedings{mueller2016proxiwatch,
  author    = {M{\"u}ller, Florian and G{\"u}nther, Sebastian and Dezfuli, Niloofar and Khalilbeigi, Mohammadreza and M{\"u}hlh{\"a}user, Max},
  title     = {{ProxiWatch}: Enhancing Smartwatch Interaction Through Proximity-based Hand Input},
  booktitle = {Proceedings of the 2016 CHI Conference Extended Abstracts on Human Factors in Computing Systems},
  series    = {CHI EA '16},
  year      = {2016},
  isbn      = {978-1-4503-4082-3},
  location  = {San Jose, California, USA},
  pages     = {2617--2624},
  numpages  = {8},
  url       = {http://doi.acm.org/10.1145/2851581.2892450},
  doi       = {10.1145/2851581.2892450},
  acmid     = {2892450},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {design, human factors, measurement, smartwatch},
}
@inproceedings{mueller2015proximity,
  author    = {M{\"u}ller, Florian and Khalilbeigi, Mohammadreza and Dezfuli, Niloofar and Sahami Shirazi, Alireza and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title     = {A Study on Proximity-based Hand Input for One-handed Mobile Interaction},
  booktitle = {Proceedings of the 3rd ACM Symposium on Spatial User Interaction},
  series    = {SUI '15},
  year      = {2015},
  isbn      = {978-1-4503-3703-8},
  location  = {Los Angeles, California, USA},
  pages     = {53--56},
  numpages  = {4},
  url       = {http://doi.acm.org/10.1145/2788940.2788955},
  doi       = {10.1145/2788940.2788955},
  acmid     = {2788955},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {design, human factors, measurement},
}
@inproceedings{dezfuli2013costream,
  author    = {Dezfuli, Niloofar and G{\"u}nther, Sebastian and Khalilbeigi, Mohammadreza and M{\"u}hlh{\"a}user, Max and Huber, Jochen},
  title     = {{CoStream@Home}: Connected Live Event Experiences},
  booktitle = {Proceedings of the 2nd International Workshop on Socially-aware Multimedia},
  series    = {SAM '13},
  year      = {2013},
  isbn      = {978-1-4503-2394-9},
  location  = {Barcelona, Spain},
  pages     = {33--36},
  numpages  = {4},
  url       = {http://doi.acm.org/10.1145/2509916.2509927},
  doi       = {10.1145/2509916.2509927},
  acmid     = {2509927},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {event, experience, experience co-construction, experience sharing, mobile live video sharing, multimedia sharing, user generated content},
  video     = {https://www.youtube.com/watch?v=Ni8DqtHvLNE},
}
@mastersthesis{guenther2013,
  title  = {Developing a Platform for Co-construction of Shared Experiences in Living Rooms Through Mobile Live User Generated Video Sharing},
  author = {G{\"u}nther, Sebastian},
  school = {Technische Universit{\"a}t Darmstadt},
  type   = {Bachelor's Thesis},
  year   = {2013},
}
@inproceedings{kauer2013,
  author    = {Kauer, Michaela and G{\"u}nther, Sebastian and Storck, Daniel and Volkamer, Melanie},
  editor    = {Marinos, Louis and Askoxylakis, Ioannis},
  title     = {A Comparison of {American} and {German} Folk Models of Home Computer Security},
  booktitle = {Human Aspects of Information Security, Privacy, and Trust},
  year      = {2013},
  publisher = {Springer Berlin Heidelberg},
  address   = {Berlin, Heidelberg},
  pages     = {100--109},
  abstract  = {Although many security solutions exist, home computer systems are vulnerable against different type of attacks. The main reason is that users are either not motivated to use these solutions or not able to correctly use them. In order to make security software more usable and hence computers more secure, we re-ran the study by Wash about ``Folk Models of Home Computer Security'' in Germany. We classified the different mental models in eleven folk models. Eight of the identified folk models are similar to the models Wash presented. We describe each folk model and illustrate how users think about computer security.},
  isbn      = {978-3-642-39345-7},
  doi       = {10.1007/978-3-642-39345-7_11},
}
I have been a regular reviewer for a variety of conferences, journals, and other venues over the past years. I received special recognitions for outstanding reviews at CHI 2020 (2x) and CHI 2021, as well as a review rated "highly useful" at MobileHCI 2019.
I am always looking for new research collaborations and opportunities in HCI. If you are interested in discussing, brainstorming, tinkering, or just having a chat with me, feel free to contact me. <3