
Sebastian Günther
Postdoctoral Researcher at TU Darmstadt
Postdoctoral Researcher at TU Darmstadt
Sebastian Günther is a postdoctoral researcher working in the Telecooperation Lab at the Technical University of Darmstadt (TU Darmstadt) of Prof. Max Mühlhäuser. He has a strong interest in Human-Computer Interaction research with a special focus on haptic systems as well as Virtual and Augmented Reality technologies.
@article{Elsayed2023movingvibro,
  author        = {Elsayed, Hesham and Weigel, Martin and M{\"u}ller, Florian and Ibrahim, George and Gugenheimer, Jan and Schmitz, Martin and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title         = {Understanding Stationary and Moving Direct Skin Vibrotactile Stimulation on the Palm},
  year          = {2023},
  journal       = {arXiv e-prints},
  publisher     = {Association for Computing Machinery},
  volume        = {1},
  number        = {1},
  pages         = {1--24},
  archiveprefix = {arXiv},
  eprint        = {2302.08820},
  arxivid       = {2302.08820},
  keywords      = {design guidelines,haptics,palm-based display,tactile perception,vibrotactile display},
  file          = {http://arxiv.org/abs/2302.08820},
  internal-note = {arXiv preprint; confirm archival venue before relying on volume/number/pages},
  abstract      = {Palm-based tactile displays have the potential to evolve from single motor interfaces (e.g., smartphones) to high-resolution tactile displays (e.g., back-of-device haptic interfaces) enabling richer multi-modal experiences with more information. However, we lack a systematic understanding of vibrotactile perception on the palm and the influence of various factors on the core design decisions of tactile displays (number of actuators, resolution, and intensity). In a first experiment (N=16), we investigated the effect of these factors on the users' ability to localize stationary sensations. In a second experiment (N=20), we explored the influence of resolution on recognition rate for moving tactile sensations. Findings show that for stationary sensations a 9 actuator display offers a good trade-off and a $3\times3$ resolution can be accurately localized. For moving sensations, a $2\times4$ resolution led to the highest recognition accuracy, while $5\times10$ enables higher resolution output with a reasonable accuracy.}
}
@inproceedings{Mueller2023tictactoes,
  author    = {M{\"u}ller, Florian and Schmitt, Daniel and Matviienko, Andrii and Sch{\"o}n, Dominik and G{\"u}nther, Sebastian and Kosch, Thomas and Schmitz, Martin},
  title     = {{TicTacToes}: Assessing Toe Movements as an Input Modality},
  year      = {2023},
  month     = apr,
  booktitle = {Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems},
  series    = {CHI '23},
  publisher = {ACM},
  address   = {New York, NY, USA},
  pages     = {1--17},
  doi       = {10.1145/3544548.3580954},
  isbn      = {9781450394215},
  url       = {https://dl.acm.org/doi/10.1145/3544548.3580954},
  keywords  = {augmented reality,foot,toes},
  file      = {https://arxiv.org/pdf/2303.15811.pdf}
}
@inproceedings{Schoen2023TailorTwist,
  author    = {Sch{\"o}n, Dominik and Kosch, Thomas and M{\"u}ller, Florian and Schmitz, Martin and G{\"u}nther, Sebastian and Bommhardt, Lukas and M{\"u}hlh{\"a}user, Max},
  title     = {Tailor Twist: Assessing Rotational Mid-Air Interactions for Augmented Reality},
  year      = {2023},
  month     = apr,
  booktitle = {Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems},
  series    = {CHI '23},
  publisher = {ACM},
  address   = {New York, NY, USA},
  pages     = {1--14},
  doi       = {10.1145/3544548.3581461},
  isbn      = {9781450394215},
  url       = {https://dl.acm.org/doi/10.1145/3544548.3581461},
  keywords  = {Augmented Reality,Mid-Air Gesture,Rotational Interaction},
  file      = {https://thomaskosch.com/wp-content/papercite-data/pdf/schoen2023tailor.pdf},
  abstract  = {Mid-air gestures, widely used in today's Augmented Reality (AR) applications, are prone to the ``gorilla arm'' effect, leading to discomfort with prolonged interactions. While prior work has proposed metrics to quantify this effect and means to improve comfort and ergonomics, these works usually only consider simplistic, one-dimensional AR interactions, like reaching for a point or pushing a button. However, interacting with AR environments also involves far more complex tasks, such as rotational knobs, potentially impacting ergonomics. This paper advances the understanding of the ergonomics of rotational mid-air interactions in AR. For this, we contribute the results of a controlled experiment exposing the participants to a rotational task in the interaction space defined by their arms' reach. Based on the results, we discuss how novel future mid-air gesture modalities benefit from our findings concerning ergonomic-aware rotational interaction.}
}
@phdthesis{Guenther2022somatosensory,
  author   = {G{\"u}nther, Sebastian},
  title    = {Somatosensory Interaction: Investigating Mechanoreception, Thermoception, and Proprioception for On-Body Haptic Feedback},
  school   = {Technische Universit{\"a}t Darmstadt},
  year     = {2022},
  month    = apr,
  type     = {Dissertation},
  address  = {Darmstadt},
  pages    = {xiii, 329},
  language = {en},
  doi      = {10.26083/tuprints-00021617},
  url      = {http://tuprints.ulb.tu-darmstadt.de/21617/},
  keywords = {haptics, virtual reality, thermoception, proprioception, mechanoreception, somatosensory system},
  file     = {https://sebastian-guenther.com/publications/dissertation.pdf},
  file_compressed = {https://sebastian-guenther.com/publications/dissertation_compressed.pdf},
  abstract = {Haptics are an important factor to make virtual worlds and remote interpersonal interaction tangible. While current technological advances, such as Virtual Reality (VR), are reaching the mass market, they are primarily visual while available haptic devices are mostly limited to vibrotactile stimuli, such as smartphone notifications or embedded in game controllers. However, haptic feedback consists of more components that make an experience physically perceivable and lifelike. In addition to the vibrotactile stimulation of fine mechanoreception, these also incorporate stronger forces addressing pressure-based mechanoreception, temperature perceived by thermoception, and body position and movement perceived by proprioception, which are all parts of the somatosensory system. Consequently, to get closer to a full haptic experience, haptics need to be considered in the broader context of the complete somatosensory system. In this thesis, novel haptic concepts will be introduced and implemented in prototypical systems to investigate them in a series of user studies, leading to a better understanding of somatosensory interaction. In this context, this dissertation provides six major contributions: (1) The first contribution presents a systematic investigation of fine and subtle mechanoreception involving vibrotactile stimuli on the hand for guidance and target acquisition. (2) The second contribution investigates more intense and pressure-based mechanoreception that employs pneumatically actuated air cushions to create immediate pressure sensations. (3) The third contribution on mechanoreception combines the findings of the previous two contributions and explores moving touches and stroke stimuli on the body, as well as their roughness perception in VR. (4) The fourth contribution addresses thermoception where the effects of cold and warm temperatures on the body are investigated within a VR environment. (5) The fifth contribution focuses on proprioception and kinesthesia and examines concepts for kinesthetic actuations that can evoke flexion and extension of body joints. (6) In a further contribution, a novel rapid prototyping platform is presented that considers the specific requirements for haptic actuations of the somatosensory system.}
}
@inproceedings{Guenther2022magnetischDemo,
  author    = {G{\"u}nther, Sebastian and Nelles, Frank and Horn, Florian and M{\"u}hlh{\"a}user, Max},
  title     = {Demonstrating {MagneTisch}: Tangibles in Motion on an Interactive Surface},
  year      = {2022},
  booktitle = {Proceedings of Mensch Und Computer 2022},
  series    = {MuC '22},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  pages     = {590--593},
  numpages  = {4},
  doi       = {10.1145/3543758.3547520},
  isbn      = {9781450396905},
  url       = {https://doi.org/10.1145/3543758.3547520},
  location  = {Darmstadt, Germany},
  keywords  = {tabletop, 3D printing, tangibles, interactive surface, magnets},
  file      = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2022/Guenther2022magnetischDemo.pdf},
  abstract  = {In this demonstration, we present MagneTisch, an interactive surface that can move, rotate, and modify 3D printed tangibles. A height-adjustable gripper arm grabs tangibles using magnets to move them to any location on the interactive table surface and also is able to rotate them as a whole or internal parts independently. In addition, the tangibles are externally tracked and a top-down projector can render digital content directly onto the surface and objects. Thereby, our demonstrator will showcase three applications: (1) a tangible virtual whiteboard, (2) a racing application, and (3) a tech demo highlighting all features.}
}
@inproceedings{Guenther2022smooth,
  author      = {G{\"u}nther, Sebastian and Rasch, Julian and Sch{\"o}n, Dominik and M{\"u}ller, Florian and Schmitz, Martin and Riemann, Jan and Matviienko, Andrii and M{\"u}hlh{\"a}user, Max},
  title       = {Smooth as Steel Wool: Effects of Visual Stimuli on the Haptic Perception of Roughness in Virtual Reality},
  year        = {2022},
  month       = apr,
  booktitle   = {Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems (CHI '22)},
  series      = {CHI '22},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3491102.3517454},
  isbn        = {978-1-4503-9157-3},
  url         = {https://dl.acm.org/doi/10.1145/3491102.3517454},
  keywords    = {haptic,smooth,stimuli,stroke,visual,visualizations},
  file        = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2022/Guenther2022smooth.pdf},
  video       = {https://www.youtube.com/watch?v=9q6zZCJ9rLg},
  teaservideo = {https://www.youtube.com/watch?v=glEOP48qVCE},
  abstract    = {Haptic Feedback is essential for lifelike Virtual Reality (VR) experiences. To provide a wide range of matching sensations of being touched or stroked, current approaches typically need large numbers of different physical textures. However, even advanced devices can only accommodate a limited number of textures to remain wearable. Therefore, a better understanding is necessary of how expectations elicited by different visualizations affect haptic perception, to achieve a balance between physical constraints and great variety of matching physical textures. In this work, we conducted an experiment (N=31) assessing how the perception of roughness is affected within VR. We designed a prototype for arm stroking and compared the effects of different visualizations on the perception of physical textures with distinct roughnesses. Additionally, we used the visualizations' real-world materials, no-haptics and vibrotactile feedback as baselines. As one result, we found that two levels of roughness can be sufficient to convey a realistic illusion.}
}
@inproceedings{Marky2022trust,
  author    = {Marky, Karola and Gerber, Paul and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title     = {Investigating State-of-the-Art Practices for Fostering Subjective Trust in Online Voting through Interviews},
  year      = {2022},
  month     = aug,
  booktitle = {31st USENIX Security Symposium (USENIX Security 22)},
  series    = {USENIX '22},
  publisher = {USENIX Association},
  address   = {Boston, MA},
  pages     = {4059--4076},
  isbn      = {978-1-939133-31-1},
  url       = {https://www.usenix.org/conference/usenixsecurity22/presentation/marky},
  file      = {https://www.usenix.org/system/files/sec22-marky.pdf}
}
@article{Matviienko2022arsightseeing,
  author     = {Matviienko, Andrii and G{\"u}nther, Sebastian and Ritzenhofen, Sebastian and M{\"u}hlh{\"a}user, Max},
  title      = {{AR} Sightseeing: Comparing Information Placements at Outdoor Historical Heritage Sites Using Augmented Reality},
  year       = {2022},
  month      = sep,
  issue_date = {September 2022},
  journal    = {Proceedings of the ACM on Human-Computer Interaction},
  volume     = {6},
  number     = {MHCI},
  articleno  = {194},
  numpages   = {17},
  series     = {MobileHCI '22},
  publisher  = {Association for Computing Machinery},
  address    = {New York, NY, USA},
  doi        = {10.1145/3546729},
  url        = {https://doi.org/10.1145/3546729},
  keywords   = {sightseeing, historical heritage, information placement, augmented reality},
  file       = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2022/Matviienko2022ARsightseeing.pdf},
  abstract   = {Augmented Reality (AR) has influenced the presentation of historical information to tourists and museum visitors by making the information more immersive and engaging. Since smartphones and AR glasses are the primary devices to present AR information to users, it is essential to understand how the information about a historical site can be presented effectively and what type of device is best suited for information placements. In this paper, we investigate the placement of two types of content, historical images and informational text, for smartphones and AR glasses in the context of outdoor historical sites. For this, we explore three types of placements: (1) on-body, (2) world, and (3) overlay. To evaluate all nine combinations of text and image placements for smartphone and AR glasses, we conducted a controlled experiment (N = 18) at outdoor historical landmarks. We discovered that on-body image and text placements were the most convenient compared to overlay and world for both devices. Furthermore, participants found themselves more successful in exploring historical sites using a smartphone than AR glasses. Although interaction with a smartphone was more convenient, participants found exploring AR content using AR glasses more fun.}
}
@inproceedings{Matviienko2022bikear,
  author    = {Matviienko, Andrii and M{\"u}ller, Florian and Sch{\"o}n, Dominik and Seesemann, Paul and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title     = {{BikeAR}: Understanding Cyclists' Crossing Decision-Making at Uncontrolled Intersections using Augmented Reality},
  year      = {2022},
  booktitle = {Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems (CHI '22)},
  series    = {CHI '22},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  doi       = {10.1145/3491102.3517560},
  url       = {https://doi.org/10.1145/3491102.3517560},
  location  = {New Orleans, LA, USA},
  keywords  = {augmented reality, cyclist safety, crossing decision-making},
  file      = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2022/Matviienko2022bikear.pdf},
  abstract  = {Cycling has become increasingly popular as a means of transportation. However, cyclists remain a highly vulnerable group of road users. According to accident reports, one of the most dangerous situations for cyclists are uncontrolled intersections, where cars approach from both directions. To address this issue and assist cyclists in crossing decision-making at uncontrolled intersections, we designed two visualizations that: (1) highlight occluded cars through an X-ray vision and (2) depict the remaining time the intersection is safe to cross via a Countdown. To investigate the efficiency of these visualizations, we proposed an Augmented Reality simulation as a novel evaluation method, in which the above visualizations are represented as AR, and conducted a controlled experiment with 24 participants indoors. We found that the X-ray ensures a fast selection of shorter gaps between cars, while the Countdown facilitates a feeling of safety and provides a better intersection overview.}
}
@inproceedings{Schoen2022trackitpipe,
  author    = {Sch{\"o}n, Dominik and Kosch, Thomas and Schmitz, Martin and M{\"u}ller, Florian and G{\"u}nther, Sebastian and Kreutz, Johannes and M{\"u}hlh{\"a}user, Max},
  title     = {{TrackItPipe}: A Fabrication Pipeline To Incorporate Location and Rotation Tracking Into {3D} Printed Objects},
  year      = {2022},
  booktitle = {The Adjunct Publication of the 35th Annual ACM Symposium on User Interface Software and Technology},
  series    = {UIST '22 Adjunct},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  articleno = {30},
  numpages  = {5},
  doi       = {10.1145/3526114.3558719},
  isbn      = {9781450393218},
  url       = {https://doi.org/10.1145/3526114.3558719},
  location  = {Bend, OR, USA},
  keywords  = {Tracking, 3D Printing, Mixed Reality, Fabrication},
  file      = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2022/Schoen2022trackitpipe.pdf},
  abstract  = {The increasing convergence of the digital and physical world creates a growing urgency to integrate 3D printed physical tangibles with virtual environments. A precise position and rotation tracking are essential to integrate such physical objects with a virtual environment. However, available 3D models commonly do not provide tracking support on their composition, which requires modifications by CAD experts. This poses a challenge for users with no prior CAD experience. This work presents TrackItPipe, a fabrication pipeline supporting users by semi-automatically adding tracking capabilities for 3D printable tangibles tailored to environmental requirements. TrackItPipe integrates modifications to the 3D model, produces the respective tangibles for 3D printing, and provides integration scripts for Mixed Reality. Using TrackItPipe, users can rapidly equip objects with tracking capabilities.}
}
@inproceedings{Schmitz2022squeezyfeely,
  author      = {Schmitz, Martin and G{\"u}nther, Sebastian and Sch{\"o}n, Dominik and M{\"u}ller, Florian},
  title       = {Squeezy-Feely: Investigating Lateral Thumb-Index Pinching as an Input Modality},
  year        = {2022},
  month       = apr,
  booktitle   = {Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems (CHI '22)},
  series      = {CHI '22},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3491102.3501981},
  isbn        = {978-1-4503-9157-3},
  url         = {https://doi.org/10.1145/3491102.3501981},
  keywords    = {Input, Pinching, Deformation, Mixed Reality, Thumb-to-finger, User Studies},
  file        = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2022/schmitz2022squeezyfeely.pdf},
  teaservideo = {https://www.youtube.com/watch?v=DW23J3CalFw},
  award       = {Best Paper},
  note        = {Best Paper Award},
  abstract    = {From zooming on smartphones and mid-air gestures to deformable user interfaces, thumb-index pinching grips are used in many interaction techniques. However, there is still a lack of systematic understanding of how the accuracy and efficiency of such grips are affected by various factors such as counterforce, grip span, and grip direction. Therefore, in this paper, we contribute an evaluation (N = 18) of thumb-index pinching performance in a visual targeting task using scales up to 75 items. As part of our findings, we conclude that the pinching interaction between the thumb and index finger is a promising modality also for one-dimensional input on higher scales. Furthermore, we discuss and outline implications for future user interfaces that benefit from pinching as an additional and complementary interaction modality.}
}
@inproceedings{Willich2022VRrescuerobots,
  author    = {von Willich, Julius and Matviienko, Andrii and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title     = {Comparing {VR} Exploration Support for Ground-Based Rescue Robots},
  year      = {2022},
  booktitle = {Adjunct Publication of the 24th International Conference on Human-Computer Interaction with Mobile Devices and Services},
  series    = {MobileHCI '22},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  articleno = {26},
  numpages  = {6},
  doi       = {10.1145/3528575.3551440},
  isbn      = {9781450393416},
  url       = {https://doi.org/10.1145/3528575.3551440},
  location  = {Vancouver, BC, Canada},
  keywords  = {virtual reality, rescue robots, interaction techniques, operation concepts},
  file      = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2022/Willich2022VRrescuerobots.pdf},
  abstract  = {Rescue robots have been extensively used in crisis situations for exploring dangerous areas. This exploration is usually facilitated via a remote operation by the rescue team. Although Virtual Reality (VR) was proposed to facilitate remote control due to its high level of immersion and situation awareness, we still lack intuitive and easy-to-use operation modes for search and rescue teams in VR environments. In this work, we propose four operation modes for ground-based rescue robots to utilize an efficient search and rescue: (a) Handle Mode, (b) Lab Mode, (c) Remote Mode, and (d) UI Mode. We evaluated these operation modes in a controlled lab experiment (N = 8) in terms of robot collisions, number of rescued victims, and mental load. Our results indicate that control modes with robot automation (UI and Remote mode) outperform modes with full control given to participants. In particular, we discovered that UI and Remote Mode lead to the lowest number of collisions, driving time, visible victims remaining, rescued victims, and mental load.}
}
@inproceedings{Elsayed2021cameraready,
  author    = {Elsayed, Hesham and Hoffmann, Philipp and G{\"u}nther, Sebastian and Schmitz, Martin and Weigel, Martin and M{\"u}hlh{\"a}user, Max and M{\"u}ller, Florian},
  title     = {{CameraReady}: Assessing the Influence of Display Types and Visualizations on Posture Guidance},
  year      = {2021},
  month     = jun,
  booktitle = {Designing Interactive Systems Conference 2021},
  series    = {DIS '21},
  publisher = {ACM},
  address   = {New York, NY, USA},
  pages     = {1046--1055},
  doi       = {10.1145/3461778.3462026},
  isbn      = {9781450384766},
  url       = {https://dl.acm.org/doi/10.1145/3461778.3462026},
  abstract  = {Computer-supported posture guidance is used in sports, dance training, expression of art with movements, and learning gestures for interaction. At present, the influence of display types and visualizations have not been investigated in the literature. These factors are important as they directly impact perception and cognitive load, and hence influence the performance of participants. In this paper, we conducted a controlled experiment with 20 participants to compare the use of five display types with different screen sizes: smartphones, tablets, desktop monitors, TVs, and large displays. On each device, we compared three common visualizations for posture guidance: skeletons, silhouettes, and 3D body models. To conduct our assessment, we developed a mobile and cross-platform system that only requires a single camera. Our results show that compared to a smartphone display, larger displays show a lower error. Regarding the choice of visualization, participants rated 3D body models as significantly more usable in comparison to a skeleton visualization.}
}
@inproceedings{Guenther2021actuboard,
  author    = {G{\"u}nther, Sebastian and M{\"u}ller, Florian and H{\"u}bner, Felix and M{\"u}hlh{\"a}user, Max and Matviienko, Andrii},
  title     = {{ActuBoard}: An Open Rapid Prototyping Platform to Integrate Hardware Actuators in Remote Applications},
  year      = {2021},
  booktitle = {Companion of the 2021 ACM SIGCHI Symposium on Engineering Interactive Computing Systems},
  series    = {EICS '21},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  pages     = {70--76},
  numpages  = {7},
  doi       = {10.1145/3459926.3464757},
  isbn      = {9781450384490},
  url       = {https://doi.org/10.1145/3459926.3464757},
  location  = {Virtual Event, Netherlands},
  keywords  = {hardware, tinkering, actuators, haptics, rapid prototyping, open source, virtual reality},
  abstract  = {Prototyping is an essential step in developing tangible experiences and novel devices, ranging from haptic feedback to wearables. However, prototyping of actuated devices nowadays often requires repetitive and time-consuming steps, such as wiring, soldering, and programming basic communication, before HCI researchers and designers can focus on their primary interest: designing interaction. In this paper, we present ActuBoard, a prototyping platform to support 1) quick assembly, 2) less preparation work, and 3) the inclusion of non-tech-savvy users. With ActuBoard, users are not required to create complex circuitry, write a single line of firmware, or implement communication protocols. Acknowledging existing systems, our platform combines the flexibility of low-level microcontrollers and ease-of-use of abstracted tinker platforms to control actuators from separate applications. As further contribution, we highlight the technical specifications and published the ActuBoard platform as Open Source.}
}
@article{elsayed2020vibromap,
  author     = {Elsayed, Hesham and Weigel, Martin and M{\"u}ller, Florian and Schmitz, Martin and Marky, Karola and G{\"u}nther, Sebastian and Riemann, Jan and M{\"u}hlh{\"a}user, Max},
  title      = {{VibroMap}: Understanding the Spacing of Vibrotactile Actuators across the Body},
  year       = {2020},
  month      = dec,
  issue_date = {December 2020},
  journal    = {Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies},
  volume     = {4},
  number     = {4},
  articleno  = {125},
  numpages   = {16},
  publisher  = {Association for Computing Machinery},
  address    = {New York, NY, USA},
  doi        = {10.1145/3432189},
  url        = {https://doi.org/10.1145/3432189},
  keywords   = {haptic output, design implications, wearable computing, vibrotactile interfaces, phantom sensation, actuator spacing, ERM vibration motors},
  abstract   = {In spite of the great potential of on-body vibrotactile displays for a variety of applications, research lacks an understanding of the spacing between vibrotactile actuators. Through two experiments, we systematically investigate vibrotactile perception on the wrist, forearm, upper arm, back, torso, thigh, and leg, each in transverse and longitudinal body orientation. In the first experiment, we address the maximum distance between vibration motors that still preserves the ability to generate phantom sensations. In the second experiment, we investigate the perceptual accuracy of localizing vibrations in order to establish the minimum distance between vibration motors. Based on the results, we derive VibroMap, a spatial map of the functional range of inter-motor distances across the body. VibroMap supports hardware and interaction designers with design guidelines for constructing body-worn vibrotactile displays.}
}
@inproceedings{guenther2020therminator,
  author      = {G{\"u}nther, Sebastian and M{\"u}ller, Florian and Sch{\"o}n, Dominik and Elmoghazy, Omar and Schmitz, Martin and M{\"u}hlh{\"a}user, Max},
  title       = {{Therminator}: Understanding the Interdependency of Visual and On-Body Thermal Feedback in Virtual Reality},
  year        = {2020},
  booktitle   = {Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems},
  series      = {CHI '20},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3313831.3376195},
  isbn        = {978-1-4503-6708-0},
  url         = {https://doi.org/10.1145/3313831.3376195},
  location    = {Honolulu, HI, USA},
  keywords    = {Haptics, Temperature, Thermal Feedback, Virtual Reality},
  video       = {https://www.youtube.com/watch?v=q5lkmqAua78},
  teaservideo = {https://youtu.be/w9FnG1eoWD8},
  abstract    = {Recent advances have made Virtual Reality (VR) more realistic than ever before. This improved realism is attributed to today's ability to increasingly appeal to human sensations, such as visual, auditory or tactile. While research also examines temperature sensation as an important aspect, the interdependency of visual and thermal perception in VR is still underexplored. In this paper, we propose Therminator, a thermal display concept that provides warm and cold on-body feedback in VR through heat conduction of flowing liquids with different temperatures. Further, we systematically evaluate the interdependency of different visual and thermal stimuli on the temperature perception of arm and abdomen with 25 participants. As part of the results, we found varying temperature perception depending on the stimuli, as well as increasing involvement of users during conditions with matching stimuli.}
}
@inproceedings{guenther2020pneumovolley,
  author      = {G{\"u}nther, Sebastian and Sch{\"o}n, Dominik and M{\"u}ller, Florian and M{\"u}hlh{\"a}user, Max and Schmitz, Martin},
  title       = {{PneumoVolley}: Pressure-based Haptic Feedback on the Head through Pneumatic Actuation},
  year        = {2020},
  booktitle   = {Proceedings of the 2020 CHI Conference Extended Abstracts on Human Factors in Computing Systems},
  series      = {CHI EA '20},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3334480.3382916},
  isbn        = {978-1-4503-6708-0},
  url         = {https://doi.org/10.1145/3334480.3382916},
  location    = {Honolulu, HI, USA},
  keywords    = {Haptics, Pressure, Volleyball, Virtual Reality, Blobbyvolley},
  file        = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2020/guenther2020pneumovolley.pdf},
  video       = {https://www.youtube.com/watch?v=ZKnV8HrUx9M},
  teaservideo = {https://www.youtube.com/watch?v=-SlrCqF-5m4},
  abstract    = {Haptic Feedback brings immersion and presence in Virtual Reality (VR) to the next level. While research proposes the usage of various tactile sensations, such as vibration or ultrasound approaches, the potential applicability of pressure feedback on the head is still underexplored. In this paper, we contribute concepts and design considerations for pressure-based feedback on the head through pneumatic actuation. As a proof-of-concept implementing our pressure-based haptics, we further present PneumoVolley: a VR experience similar to the classic Volleyball game but played with the head. In an exploratory user study with 9 participants, we evaluated our concepts and identified a significantly increased involvement compared to a no-haptics baseline along with high realism and enjoyment ratings using pressure-based feedback on the head in VR.}
}
@inproceedings{koutny2020toolsupport,
  author    = {Koutny, Reinhard and G{\"u}nther, Sebastian and Dhingra, Naina and Kunz, Andreas and Miesenberger, Klaus and M{\"u}hlh{\"a}user, Max},
  editor    = {Miesenberger, Klaus and Manduchi, Roberto and Covarrubias Rodriguez, Mario and Pe{\v{n}}{\'a}z, Petr},
  title     = {Accessible Multimodal Tool Support for Brainstorming Meetings},
  year      = {2020},
  booktitle = {Computers Helping People with Special Needs},
  series    = {ICCHP '20},
  publisher = {Springer International Publishing},
  address   = {Cham},
  pages     = {11--20},
  doi       = {10.1007/978-3-030-58805-2_2},
  isbn      = {978-3-030-58805-2},
  url       = {https://doi.org/10.1007/978-3-030-58805-2_2},
  abstract  = {In recent years, assistive technology and digital accessibility for blind and visually impaired people (BVIP) has been significantly improved. Yet, group discussions, especially in a business context, are still challenging as non-verbal communication (NVC) is often depicted on digital whiteboards, including deictic gestures paired with visual artifacts. However, as NVC heavily relies on the visual perception, which represents a large amount of detail, an adaptive approach is required that identifies the most relevant information for BVIP. Additionally, visual artifacts usually rely on spatial properties such as position, orientation, and dimensions to convey essential information such as hierarchy, cohesion, and importance that is often not accessible to the BVIP. In this paper, we investigate the requirements of BVIP during brainstorming sessions and, based on our findings, provide an accessible multimodal tool that uses non-verbal and spatial cues as an additional layer of information. Further, we contribute by presenting a set of input and output modalities that encode and decode information with respect to the individual demands of BVIP and the requirements of different use cases.}
}
@article{mueller2020aroundbodyinteraction,
  author  = {M{\"u}ller, Florian and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max},
  title   = {Around-Body Interaction: Interacting While on the Go},
  year    = {2020},
  journal = {IEEE Pervasive Computing},
  volume  = {19},
  number  = {2},
  pages   = {74--78},
  doi     = {10.1109/MPRV.2020.2977850},
  url     = {https://doi.org/10.1109/MPRV.2020.2977850}
}
@inproceedings{mueller2020walktheline,
  author      = {M{\"u}ller, Florian and Schmitz, Martin and Schmitt, Daniel and G{\"u}nther, Sebastian and Funk, Markus and M{\"u}hlh{\"a}user, Max},
  title       = {Walk The Line: Leveraging Lateral Shifts of the Walking Path as an Input Modality for Head-Mounted Displays},
  booktitle   = {Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems},
  series      = {CHI '20},
  year        = {2020},
  isbn        = {978-1-4503-6708-0},
  location    = {Honolulu, HI, USA},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  doi         = {10.1145/3313831.3376852},
  url         = {http://dx.doi.org/10.1145/3313831.3376852},
  file        = {https://fileserver.tk.informatik.tu-darmstadt.de/Publications/2020/mueller2020walktheline.pdf},
  video       = {https://youtu.be/ylAlzFqWx7g},
  teaservideo = {https://youtu.be/6-XrF6J9cTc},
  keywords    = {Augmented Reality, Head-Mounted Display, Input, Walking},
  abstract    = {Recent technological advances have made head-mounted displays (HMDs) smaller and untethered, fostering the vision of ubiquitous interaction in a digitally augmented physical world. Consequently, a major part of the interaction with such devices will happen on the go, calling for interaction techniques that allow users to interact while walking. In this paper, we explore lateral shifts of the walking path as a hands-free input modality. The available input options are visualized as lanes on the ground parallel to the user's walking path. Users can select options by shifting the walking path sideways to the respective lane. We contribute the results of a controlled experiment with 18 participants, confirming the viability of our approach for fast, accurate, and joyful interactions. Further, based on the findings of the controlled experiment, we present three example applications.},
}
@inproceedings{dhingra2020pointing,
  author    = {Dhingra, Naina and Koutny, Reinhard and G{\"u}nther, Sebastian and Miesenberger, Klaus and M{\"u}hlh{\"a}user, Max and Kunz, Andreas},
  editor    = {Miesenberger, Klaus and Manduchi, Roberto and Covarrubias Rodriguez, Mario and Pe{\v{n}}{\'a}z, Petr},
  title     = {Pointing Gesture Based User Interaction of Tool Supported Brainstorming Meetings},
  booktitle = {Computers Helping People with Special Needs},
  series    = {ICCHP '20},
  year      = {2020},
  publisher = {Springer International Publishing},
  address   = {Cham},
  pages     = {21--29},
  isbn      = {978-3-030-58805-2},
  doi       = {10.1007/978-3-030-58805-2_3},
  url       = {https://doi.org/10.1007/978-3-030-58805-2_3},
  abstract  = {This paper presents a brainstorming tool combined with pointing gestures to improve the brainstorming meeting experience for blind and visually impaired people (BVIP). In brainstorming meetings, BVIPs are not able to participate in the conversation as well as sighted users because of the unavailability of supporting tools for understanding the explicit and implicit meaning of the non-verbal communication (NVC). Therefore, the proposed system assists BVIP in interpreting pointing gestures which play an important role in non-verbal communication. Our system will help BVIP to access the contents of a Metaplan card, a team member in the brainstorming meeting is referring to by pointing. The prototype of our system shows that targets on the screen a user is pointing at can be detected with 80{\%} accuracy.},
}
@inproceedings{funk2019assessing, title={Assessing the Accuracy of Point \& Teleport Locomotion with Orientation Indication for Virtual Reality using Curved Trajectories}, author={Funk, Markus and M{\"u}ller, Florian and Fendrich, Marco and Shene, Megan and Kolvenbach, Moritz and Dobbertin, Niclas and G{\"u}nther, Sebastian and M{\"u}hlh{\"a}user, Max}, booktitle={Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems}, doi={10.1145/3290605.3300377}, year={2019}, teaservideo={https://www.youtube.com/watch?v=klu82WxeBlA}, video={https://www.youtube.com/watch?v=uXctClcQu_g} }
@inproceedings{guenther2019mapvi, title = {MAPVI: Meeting Accessibility for Persons with Visual Impairments}, author = {G{\"u}nther, Sebastian and Koutny, Reinhard and Dhingra, Naina and Funk, Markus and Hirt, Christian and Miesenberger, Klaus and M{\"u}hlh{\"a}user, Max and Kunz, Andreas}, doi = {10.1145/3316782.3322747}, booktitle = {Proceedings of the 12th PErvasive Technologies Related to Assistive Environments Conference}, year = {2019}, series = {PETRA '19}, acmid = {3322747}, publisher = {ACM}, address = {New York, NY, USA}, }
@inproceedings{guenther2019pneumact, title = {PneumAct: Pneumatic Kinesthetic Actuation of Body Joints in Virtual Reality Environments}, author = {G{\"u}nther, Sebastian and Makhija, Mohit and M{\"u}ller, Florian and Sch{\"o}n, Dominik and M{\"u}hlh{\"a}user, Max and Funk, Markus}, doi = {10.1145/3322276.3322302}, booktitle = {Proceedings of the ACM Conference on Designing Interactive Systems, DIS '19}, keywords = {Compressed Air,Force Feedback,Kinesthetic,Pneumatic,haptics,virtual Reality}, year = {2019}, series = {DIS '19}, teaservideo = {https://youtu.be/4lRWxzs4Rgs}, abstract={Virtual Reality Environments (VRE) create an immersive user experience through visual, aural, and haptic sensations. However, the latter is often limited to vibrotactile sensations that are not able to actively provide kinesthetic motion actuation. Further, such sensations do not cover natural representations of physical forces, for example, when lifting a weight. We present PneumAct, a jacket to enable pneumatically actuated kinesthetic movements of arm joints in VRE. It integrates two types of actuators inflated through compressed air: a Contraction Actuator and an Extension Actuator. We evaluate our PneumAct jacket through two user studies with a total of 32 participants: First, we perform a technical evaluation measuring the contraction and extension angles of different inflation patterns and inflation durations. Second, we evaluate PneumAct in three VRE scenarios comparing our system to traditional controller-based vibrotactile and a baseline without haptic feedback.} }
@inproceedings{mueller2019mind, title={Mind the Tap: Assessing Foot-Taps for Interacting with Head-Mounted Displays}, author={M{\"u}ller, Florian and McManus, Joshua and G{\"u}nther, Sebastian and Schmitz, Martin and M{\"u}hlh{\"a}user, Max and Funk, Markus}, booktitle={Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems}, doi={10.1145/3290605.3300707}, year={2019}, teaservideo={https://www.youtube.com/watch?v=RhabMsP0X14}, video={https://www.youtube.com/watch?v=D5hTVIEb7iA&t}, note={Honorable Mention Award} }
@inproceedings{mueller2019smartobjects,
  author    = {M{\"u}ller, Florian and Schnelle-Walka, Dirk and G{\"u}nther, Sebastian and Marky, Karola and Funk, Markus and M{\"u}hlh{\"a}user, Max},
  title     = {Proceedings of the 7th Workshop on Interacting with Smart Objects},
  booktitle = {Workshop Co-located with the 11th ACM SIGCHI Symposium on Engineering Interactive Computing Systems (EICS '19)},
  year      = {2019},
}
@inproceedings{marky2019teachyverse, author = {Marky, Karola and M\"{u}ller, Florian and Funk, Markus and Geiss, Alex and G\"{u}nther, Sebastian and Schmitz, Martin and Riemann, Jan and M\"{u}hlh\"{a}user, Max}, title = {Teachyverse: Collaborative E-Learning in Virtual Reality Lecture Halls}, booktitle = {Proceedings of Mensch Und Computer 2019}, series = {MuC'19}, year = {2019}, isbn = {978-1-4503-7198-8}, location = {Hamburg, Germany}, pages = {831--834}, numpages = {4}, url = {http://doi.acm.org/10.1145/3340764.3344917}, doi = {10.1145/3340764.3344917}, acmid = {3344917}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {E-Learning, Lecture halls, Virtual Lecture, Virtual Reality}, }
@inproceedings{marky2019music, booktitle = {Proceedings of the 7th Workshop on Interacting with Smart Objects (SmartObjects '19) in conjunction with EICS '19}, year = {2019}, author = {Marky, Karola and Weiss, Andreas and Gedeon, Julien and G{\"u}nther, Sebastian}, title = {Mastering Music Instruments through Technology in Solo Learning Sessions}, url = {http://tubiblio.ulb.tu-darmstadt.de/113376/} }
@inproceedings{willich2019vrchairracer, title={VRChairRacer: Using an Office Chair Backrest as a Locomotion Technique for VR Racing Games}, author={von Willich, Julius and Sch{\"o}n, Dominik and G{\"u}nther, Sebastian and M{\"u}ller, Florian and M{\"u}hlh{\"a}user, Max and Funk, Markus}, booktitle = {Proceedings of the 2019 CHI Conference Extended Abstracts on Human Factors in Computing Systems}, series = {CHI EA '19}, doi={10.1145/3290607.3313254}, year={2019}, teaservideo={https://www.youtube.com/watch?v=8ukVghWoTlE}, video={https://www.youtube.com/watch?v=v906aGntoKY} }
@inproceedings{guenther2018assistive, author = {G\"{u}nther, Sebastian and Kratz, Sven and Avrahami, Daniel and M\"{u}hlh\"{a}user, Max}, title = {Exploring Audio, Visual, and Tactile Cues for Synchronous Remote Assistance}, booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference}, series = {PETRA '18}, year = {2018}, isbn = {978-1-4503-6390-7}, location = {Corfu, Greece}, pages = {339--344}, numpages = {6}, url = {http://doi.acm.org/10.1145/3197768.3201568}, doi = {10.1145/3197768.3201568}, acmid = {3201568}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {3D-Space, Assistive Technology, Audio Cues, Augmented Reality, Haptics, Navigation, Remote Collaboration, Spatial Guidance, Vibrotactile Feedback}, }
@inproceedings{guenther2018tactileglove, author = {G\"{u}nther, Sebastian and M\"{u}ller, Florian and Funk, Markus and Kirchner, Jan and Dezfuli, Niloofar and M\"{u}hlh\"{a}user, Max}, title = {TactileGlove: Assistive Spatial Guidance in 3D Space Through Vibrotactile Navigation}, booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference}, series = {PETRA '18}, year = {2018}, isbn = {978-1-4503-6390-7}, location = {Corfu, Greece}, pages = {273--280}, numpages = {8}, url = {http://doi.acm.org/10.1145/3197768.3197785}, doi = {10.1145/3197768.3197785}, acmid = {3197785}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {3D-Space, Assistive Technology, Haptics, Navigation, Pull Push Metaphors, Spatial Guidance, Vibrotactile}, }
@inproceedings{guenther2018checkmate, author = {G\"{u}nther, Sebastian and M\"{u}ller, Florian and Schmitz, Martin and Riemann, Jan and Dezfuli, Niloofar and Funk, Markus and Sch\"{o}n, Dominik and M\"{u}hlh\"{a}user, Max}, title = {CheckMate: Exploring a Tangible Augmented Reality Interface for Remote Interaction}, booktitle = {Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems}, series = {CHI EA '18}, year = {2018}, isbn = {978-1-4503-5621-3}, location = {Montreal QC, Canada}, pages = {LBW570:1--LBW570:6}, articleno = {LBW570}, numpages = {6}, url = {http://doi.acm.org/10.1145/3170427.3188647}, doi = {10.1145/3170427.3188647}, acmid = {3188647}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {3d fabrication, augmented reality, chess, mixed reality, remote collaboration, tabletops, tangibles}, teaservideo={https://www.youtube.com/watch?v=Geyr95Nl8mc} }
@inproceedings{mueller2018pucsbi, author = {M\"{u}ller, Florian and Schmitz, Martin and Funk, Markus and G\"{u}nther, Sebastian and Dezfuli, Niloofar and M\"{u}hlh\"{a}user, Max}, title = {Personalized User-Carried Single Button Interfaces As Shortcuts for Interacting with Smart Devices}, booktitle = {Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems}, series = {CHI EA '18}, year = {2018}, isbn = {978-1-4503-5621-3}, location = {Montreal QC, Canada}, pages = {LBW602:1--LBW602:6}, articleno = {LBW602}, numpages = {6}, url = {http://doi.acm.org/10.1145/3170427.3188661}, doi = {10.1145/3170427.3188661}, acmid = {3188661}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {human factors, interaction, smart devices}, teaservideo={https://www.youtube.com/watch?v=Z5wicorfmxU} }
@book{mueller2018smartobjectsproceedings, number = {2082}, series = {CEUR Workshop Proceedings}, booktitle = {CEUR Workshop Proceedings (CHI '18 Workshop)}, year = {2018}, title = {Proceedings of the 6th Workshop on Interacting with Smart Objects (SmartObjects)}, issn = {1613-0073}, author = {M{\"u}ller, Florian and Schnelle-Walka, Dirk and G{\"u}nther, Sebastian and Funk, Markus}, address = {Montreal, Canada}, url = {http://tubiblio.ulb.tu-darmstadt.de/107555/} }
@inproceedings{mueller2018smartobjects, author = {M\"{u}ller, Florian and Schnelle-Walka, Dirk and Grosse-Puppendahl, Tobias and G\"{u}nther, Sebastian and Funk, Markus and Luyten, Kris and Brdiczka, Oliver and Dezfuli, Niloofar and M\"{u}hlh\"{a}user, Max}, title = {SmartObjects: Sixth Workshop on Interacting with Smart Objects}, booktitle = {Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems}, series = {CHI EA '18}, year = {2018}, isbn = {978-1-4503-5621-3}, location = {Montreal QC, Canada}, pages = {W20:1--W20:6}, articleno = {W20}, numpages = {6}, url = {http://doi.acm.org/10.1145/3170427.3170606}, doi = {10.1145/3170427.3170606}, acmid = {3170606}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {context-awareness, embodied interaction, enabling techologies, hci, multimodal and adapter interaction, novel interaction, smart objects, tangible interaction}, }
@inproceedings{murauer2018language, author = {Murauer, Nela and M\"{u}ller, Florian and G\"{u}nther, Sebastian and Sch\"{o}n, Dominik and Pflanz, Nerina and Funk, Markus}, title = {An Analysis of Language Impact on Augmented Reality Order Picking Training}, booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference}, series = {PETRA '18}, year = {2018}, isbn = {978-1-4503-6390-7}, location = {Corfu, Greece}, pages = {351--357}, numpages = {7}, url = {http://doi.acm.org/10.1145/3197768.3201570}, doi = {10.1145/3197768.3201570}, acmid = {3201570}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {Assistive Systems, Augmented Reality, Order Picking}, }
@inproceedings{schmitz2018offline, author = {Schmitz, Martin and Herbers, Martin and Dezfuli, Niloofar and G\"{u}nther, Sebastian and M\"{u}hlh\"{a}user, Max}, title = {Off-Line Sensing: Memorizing Interactions in Passive 3D-Printed Objects}, booktitle = {Proceedings of the 2018 CHI Conference on Human Factors in Computing Systems}, series = {CHI '18}, year = {2018}, isbn = {978-1-4503-5620-6}, location = {Montreal QC, Canada}, pages = {182:1--182:8}, articleno = {182}, numpages = {8}, url = {http://doi.acm.org/10.1145/3173574.3173756}, doi = {10.1145/3173574.3173756}, acmid = {3173756}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {3d printing, capacitive sensing, digital fabrication, input, mechanism, metamaterial, sensors}, teaservideo={https://www.youtube.com/watch?v=19dDaeBEnPM}, note={Honorable Mention Award} }
@inproceedings{guenther2017byo, author = {G\"{u}nther, Sebastian and Schmitz, Martin and M\"{u}ller, Florian and Riemann, Jan and M\"{u}hlh\"{a}user, Max}, title = {BYO*: Utilizing 3D Printed Tangible Tools for Interaction on Interactive Surfaces}, booktitle = {Proceedings of the 2017 ACM Workshop on Interacting with Smart Objects}, series = {SmartObject '17}, year = {2017}, isbn = {978-1-4503-4902-4}, location = {Limassol, Cyprus}, pages = {21--26}, numpages = {6}, url = {http://doi.acm.org/10.1145/3038450.3038456}, doi = {10.1145/3038450.3038456}, acmid = {3038456}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {3D printing, capacitive sensing, data manipulation, data sharing, data visualization, digital fabrication, input sensing, rapid prototyping}, }
@inproceedings{mueller2017cloudbits, author = {M\"{u}ller, Florian and G\"{u}nther, Sebastian and Nejad, Azita Hosseini and Dezfuli, Niloofar and Khalilbeigi, Mohammadreza and M\"{u}hlh\"{a}user, Max}, title = {Cloudbits: Supporting Conversations Through Augmented Zero-query Search Visualization}, booktitle = {Proceedings of the 5th Symposium on Spatial User Interaction}, series = {SUI '17}, year = {2017}, isbn = {978-1-4503-5486-8}, location = {Brighton, United Kingdom}, pages = {30--38}, numpages = {9}, url = {http://doi.acm.org/10.1145/3131277.3132173}, doi = {10.1145/3131277.3132173}, acmid = {3132173}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {HMD, design, human factors}, }
@inproceedings{meurisch2017,
  author    = {Meurisch, Christian and Naeem, Usman and Scholl, Philipp M. and Azam, Muhammad Awais and G\"{u}nther, Sebastian and Baumann, Paul and R{\'e}hman, Shafiq ur and M\"{u}hlh\"{a}user, Max},
  title     = {{SmartGuidance'17}: 2nd Workshop on Intelligent Personal Support of Human Behavior},
  booktitle = {Proceedings of the 2017 ACM International Joint Conference on Pervasive and Ubiquitous Computing and Proceedings of the 2017 ACM International Symposium on Wearable Computers},
  series    = {UbiComp '17},
  year      = {2017},
  isbn      = {978-1-4503-5190-4},
  location  = {Maui, Hawaii},
  pages     = {623--626},
  numpages  = {4},
  url       = {http://doi.acm.org/10.1145/3123024.3124457},
  doi       = {10.1145/3123024.3124457},
  acmid     = {3124457},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {anticipatory mobile computing, mobile sensing, personal assistance, pervasive environment, ubiquitous devices},
}
@inproceedings{riemann2017hybrid, author = {Riemann, Jan and M\"{u}ller, Florian and G\"{u}nther, Sebastian and M\"{u}hlh\"{a}user, Max}, title = {An Evaluation of Hybrid Stacking on Interactive Tabletops}, booktitle = {Proceedings of the 2017 ACM Workshop on Interacting with Smart Objects}, series = {SmartObject '17}, year = {2017}, isbn = {978-1-4503-4902-4}, location = {Limassol, Cyprus}, pages = {13--20}, numpages = {8}, url = {http://doi.acm.org/10.1145/3038450.3038451}, doi = {10.1145/3038450.3038451}, acmid = {3038451}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {hybrid physical-digital interaction, interactive tabletop displays, multitouch, peripheral displays, piling, stacking}, }
@mastersthesis{guenther2016proximity,
  author        = {G\"{u}nther, Sebastian},
  title         = {Proximity-based Interaction for Smartwatches},
  school        = {Technische Universit{\"a}t Darmstadt},
  type          = {Master Thesis},
  year          = {2016},
  internal-note = {school assumed from author's stated TU Darmstadt affiliation; verify},
}
@inproceedings{mueller2016proxiwatch, author = {M\"{u}ller, Florian and G\"{u}nther, Sebastian and Dezfuli, Niloofar and Khalilbeigi, Mohammadreza and M\"{u}hlh\"{a}user, Max}, title = {ProxiWatch: Enhancing Smartwatch Interaction Through Proximity-based Hand Input}, booktitle = {Proceedings of the 2016 CHI Conference Extended Abstracts on Human Factors in Computing Systems}, series = {CHI EA '16}, year = {2016}, isbn = {978-1-4503-4082-3}, location = {San Jose, California, USA}, pages = {2617--2624}, numpages = {8}, url = {http://doi.acm.org/10.1145/2851581.2892450}, doi = {10.1145/2851581.2892450}, acmid = {2892450}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {design, human factors, measurement, smartwatch}, }
@inproceedings{mueller2015proximity, author = {M\"{u}ller, Florian and Khalilbeigi, Mohammadreza and Dezfuli, Niloofar and Sahami Shirazi, Alireza and G\"{u}nther, Sebastian and M\"{u}hlh\"{a}user, Max}, title = {A Study on Proximity-based Hand Input for One-handed Mobile Interaction}, booktitle = {Proceedings of the 3rd ACM Symposium on Spatial User Interaction}, series = {SUI '15}, year = {2015}, isbn = {978-1-4503-3703-8}, location = {Los Angeles, California, USA}, pages = {53--56}, numpages = {4}, url = {http://doi.acm.org/10.1145/2788940.2788955}, doi = {10.1145/2788940.2788955}, acmid = {2788955}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {design, human factors, measurement}, }
@inproceedings{dezfuli2013costream,
  author    = {Dezfuli, Niloofar and G\"{u}nther, Sebastian and Khalilbeigi, Mohammadreza and M\"{u}hlh\"{a}user, Max and Huber, Jochen},
  title     = {{CoStream@Home}: Connected Live Event Experiences},
  booktitle = {Proceedings of the 2nd International Workshop on Socially-aware Multimedia},
  series    = {SAM '13},
  year      = {2013},
  isbn      = {978-1-4503-2394-9},
  location  = {Barcelona, Spain},
  pages     = {33--36},
  numpages  = {4},
  url       = {http://doi.acm.org/10.1145/2509916.2509927},
  doi       = {10.1145/2509916.2509927},
  acmid     = {2509927},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {event, experience, experience co-construction, experience sharing, mobile live video sharing, multimedia sharing, user generated content},
  video     = {https://www.youtube.com/watch?v=Ni8DqtHvLNE},
}
@mastersthesis{guenther2013,
  author        = {G\"{u}nther, Sebastian},
  title         = {Developing a Platform for Co-construction of Shared Experiences in Living Rooms Through Mobile Live User Generated Video Sharing},
  school        = {Technische Universit{\"a}t Darmstadt},
  type          = {Bachelor Thesis},
  year          = {2013},
  internal-note = {school assumed from author's stated TU Darmstadt affiliation; verify},
}
@InProceedings{kauer2013, author="Kauer, Michaela and G{\"u}nther, Sebastian and Storck, Daniel and Volkamer, Melanie", editor="Marinos, Louis and Askoxylakis, Ioannis", title="A Comparison of American and German Folk Models of Home Computer Security", booktitle="Human Aspects of Information Security, Privacy, and Trust", year="2013", publisher="Springer Berlin Heidelberg", address="Berlin, Heidelberg", pages="100--109", abstract="Although many security solutions exist, home computer systems are vulnerable against different type of attacks. The main reason is that users are either not motivated to use these solutions or not able to correctly use them. In order to make security software more usable and hence computers more secure, we re-ran the study by Wash about ``Folk Models of Home Computer Security'' in Germany. We classified the different mental models in eleven folk models. Eight of the identified folk models are similar to the models Wash presented. We describe each folk model and illustrate how users think about computer security.", isbn="978-3-642-39345-7", doi="10.1007/978-3-642-39345-7_11" }
I have been a regular reviewer for a variety of conferences, journals, and other venues over the past years. I received special recognitions and "highly useful" ratings for outstanding reviews at CHI 2020 (2x), CHI 2021, CHI 2023, MobileHCI 2019, and UIST 2023.
I am always looking for new research collaborations and opportunities in HCI. If you are interested in discussing, brainstorming, tinkering, or just having a chat with me, feel free to contact me. <3