We are happy to present this year’s contributions from the German HCI labs to CHI 2021! Feel free to browse our list of publications: there are 53 full papers (including one Best Paper and five Honorable Mentions), 16 Late-Breaking Works, and many demonstrations, workshops, and other publications. Please send us an email if your paper is missing or if any entry needs correcting.
From Detectables to Inspectables: Understanding Qualitative Analysis of Audiovisual Data
Krishna Subramanian (RWTH Aachen University), Johannes Maas (RWTH Aachen University), Jan Borchers (RWTH Aachen University), and Jim Hollan (University of California, San Diego)
Abstract | Tags: Full Paper, Honorable Mention | Links:
@inproceedings{Subramanian2021detectables,
title = {From Detectables to Inspectables: Understanding Qualitative Analysis of Audiovisual Data},
author = {Krishna Subramanian (RWTH Aachen University) and Johannes Maas (RWTH Aachen University) and Jan Borchers (RWTH Aachen University) and Jim Hollan (University of California, San Diego)},
url = {https://hci.rwth-aachen.de (lab page)
https://www.youtube.com/watch?v=YJw2gMJ8cSY (teaser)},
doi = {10.1145/3411764.3445458},
year = {2021},
date = {2021-05-07},
publisher = {ACM},
abstract = {Audiovisual recordings of user studies and interviews provide important data in qualitative HCI research. Even when a textual transcription is available, researchers frequently turn to these recordings due to their rich information content. However, the temporal, unstructured nature of audiovisual recordings makes them less efficient to work with than text. Through interviews and a survey, we explored how HCI researchers work with audiovisual recordings. We investigated researchers' transcription and annotation practice, their overall analysis workflow, and the prevalence of direct analysis of audiovisual recordings. We found that a key task was locating and analyzing inspectables, interesting segments in recordings. Since locating inspectables can be time consuming, participants look for detectables, visual or auditory cues that indicate the presence of an inspectable. Based on our findings, we discuss the potential for automation in locating detectables in qualitative audiovisual analysis.},
keywords = {Full Paper, Honorable Mention},
pubstate = {published},
tppubtype = {inproceedings}
}
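
The abstract's closing point about automating the search for detectables can be made concrete with a small sketch. The following is purely illustrative and not from the paper: it assumes the librosa audio library and treats unusually loud moments (e.g., laughter or raised voices) as a simple auditory cue, returning candidate segments for a researcher to inspect.

import numpy as np
import librosa

def candidate_detectables(path, hop_length=512, threshold_std=2.0):
    """Return (start, end) times in seconds of unusually loud segments."""
    y, sr = librosa.load(path, sr=None, mono=True)
    rms = librosa.feature.rms(y=y, hop_length=hop_length)[0]
    # Flag frames whose loudness is well above the recording's average.
    loud = rms > rms.mean() + threshold_std * rms.std()
    times = librosa.frames_to_time(np.arange(len(rms)), sr=sr, hop_length=hop_length)
    segments, start = [], None
    for t, flag in zip(times, loud):
        if flag and start is None:
            start = t
        elif not flag and start is not None:
            segments.append((start, t))
            start = None
    if start is not None:
        segments.append((start, times[-1]))
    return segments

Any such heuristic only proposes candidate inspectables; deciding whether a flagged segment is actually interesting remains the researcher's qualitative judgment.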

It Takes More Than One Hand to Clap: On the Role of ‘Care’ in Maintaining Design Results
Max Krüger (Universität Siegen), Anne Weibert (Universität Siegen), Débora de Castro Leal (Universität Siegen), Dave Randall (Universität Siegen), Volker Wulf (Universität Siegen)
Abstract | Tags: Full Paper, Honorable Mention | Links:
@inproceedings{Krueger2021CareRole,
title = {It Takes More Than One Hand to Clap: On the Role of ‘Care’ in Maintaining Design Results},
author = {Max Krüger (Universität Siegen) and Anne Weibert (Universität Siegen) and Débora de Castro Leal (Universität Siegen) and Dave Randall (Universität Siegen) and Volker Wulf (Universität Siegen)},
url = {https://www.wineme.uni-siegen.de/ (lab page)},
doi = {10.1145/3411764.3445389},
year = {2021},
date = {2021-05-07},
abstract = {Within Participatory and Co-Design projects, the sustainability and maintenance of co-designed artefacts is a crucial yet largely unresolved issue. In this paper, we look back on four years of work on co-designing tools that assist refugees and migrants in their efforts to settle in Germany, during the last of which the project has been independently maintained by our community collaborators. We reflect on the pre-existing care practices amongst our community collaborators, on a continued openness throughout the project that allowed a complex constellation of actors to become involved in its ongoing maintenance, and on our own, often mundane activities which have contributed to the sustainability of the results. Situating our account within an HCI for Social Justice agenda, we thereby contribute to an ongoing discussion about the sustainability of such activities.},
keywords = {Full Paper, Honorable Mention},
pubstate = {published},
tppubtype = {inproceedings}
}

Itsy-Bits: Fabrication and Recognition of 3D-Printed Tangibles with Small Footprints on Capacitive Touchscreens
Martin Schmitz (Technical University of Darmstadt), Florian Müller (Technical University of Darmstadt), Max Mühlhäuser (Technical University of Darmstadt), Jan Riemann (Technical University of Darmstadt), Huy Viet Le (University of Stuttgart)
Abstract | Tags: Full Paper, Honorable Mention | Links:
@inproceedings{2021SchmitzItsy,
title = {Itsy-Bits: Fabrication and Recognition of 3D-Printed Tangibles with Small Footprints on Capacitive Touchscreens},
author = {Martin Schmitz (Technical University of Darmstadt) and Florian Müller (Technical University of Darmstadt) and Max Mühlhäuser (Technical University of Darmstadt) and Jan Riemann (Technical University of Darmstadt) and Huy Viet Le (University of Stuttgart)},
url = {https://www.teamdarmstadt.de/, Website - Team Darmstadt
https://www.facebook.com/teamdarmstadt/, Facebook - Team Darmstadt
https://youtu.be/55vHxnOKl6k, YouTube - Teaser},
doi = {10.1145/3411764.3445502},
year = {2021},
date = {2021-05-01},
abstract = {Tangibles on capacitive touchscreens are a promising approach to overcome the limited expressiveness of touch input. While research has suggested many approaches to detect tangibles, the corresponding tangibles are either costly or have a considerable minimal size. This makes them bulky and unattractive for many applications. At the same time, they obscure valuable display space for interaction. To address these shortcomings, we contribute Itsy-Bits: a fabrication pipeline for 3D printing and recognition of tangibles on capacitive touchscreens with a footprint as small as a fingertip. Each Itsy-Bit consists of an enclosing 3D object and a unique conductive 2D shape on its bottom. Using only raw data of commodity capacitive touchscreens, Itsy-Bits reliably identifies and locates a variety of shapes in different sizes and estimates their orientation. Through example applications and a technical evaluation, we demonstrate the feasibility and applicability of Itsy-Bits for tangibles with small footprints.},
keywords = {Full Paper, Honorable Mention},
pubstate = {published},
tppubtype = {inproceedings}
}
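
To give a rough sense of how a small conductive footprint might be located in raw touchscreen data, here is a minimal sketch under assumed conditions (a single tangible per frame, a hand-picked capacitance threshold); it is not the authors' recognition pipeline. Position comes from the blob centroid and orientation from its second-order image moments.

import numpy as np

def locate_footprint(frame, threshold=30):
    """frame: 2D array of raw capacitance values from the touch controller."""
    mask = frame > threshold                    # pixels covered by the conductive shape
    ys, xs = np.nonzero(mask)
    if xs.size == 0:
        return None                             # no tangible on the screen
    cx, cy = xs.mean(), ys.mean()               # centroid = tangible position
    # Central second-order moments give the blob's principal axis.
    mu20 = ((xs - cx) ** 2).mean()
    mu02 = ((ys - cy) ** 2).mean()
    mu11 = ((xs - cx) * (ys - cy)).mean()
    angle = 0.5 * np.arctan2(2 * mu11, mu20 - mu02)   # orientation in radians
    return {"x": cx, "y": cy, "angle": angle, "area": int(mask.sum())}

Identifying which tangible is present would additionally require matching the extracted blob against the known footprint shapes, which this sketch does not attempt.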

The Development and Validation of the Technology-Supported Reflection Inventory
Marit Bentvelzen (Utrecht University), Jasmin Niess (University of Bremen), Mikołaj P. Woźniak (Lodz University of Technology), Paweł W. Woźniak (Utrecht University)
Abstract | Tags: Full Paper, Honorable Mention | Links:
@inproceedings{Bentvelzen2021,
title = {The Development and Validation of the Technology-Supported Reflection Inventory},
author = {Marit Bentvelzen (Utrecht University) and Jasmin Niess (University of Bremen) and Mikołaj P. Woźniak (Lodz University of Technology) and Paweł W. Woźniak (Utrecht University)},
url = {https://twitter.com/HCIBremen
https://hci.uni-bremen.de/ (lab page)
https://www.youtube.com/watch?v=ZuJRehDJf-4 (teaser)},
doi = {10.1145/3411764.3445673},
year = {2021},
date = {2021-05-07},
publisher = {ACM},
abstract = {Reflection is an often addressed design goal in Human-Computer Interaction (HCI) research. An increasing number of artefacts for reflection have been developed in recent years. However, evaluating if and how an interactive technology helps a user reflect is still complex. This makes it difficult to compare artefacts (or prototypes) for reflection, impeding future design efforts. To address this issue, we developed the Technology-Supported Reflection Inventory (TSRI), which is a scale that evaluates how effectively a system supports reflection. We first created a list of possible scale items based on past work in defining reflection. The items were then reviewed by experts. Next, we performed exploratory factor analysis to reduce the scale to its final length of nine items. Subsequently, we confirmed test-retest validity of our instrument, as well as its construct validity. The TSRI enables researchers and practitioners to compare prototypes designed to support reflection. },
keywords = {Full Paper, Honorable Mention},
pubstate = {published},
tppubtype = {inproceedings}
}
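
The scale-development steps mentioned in the abstract (factor-analytic item reduction and a test-retest check) can be outlined roughly as follows. This is an illustrative sketch only, not the authors' analysis code; it assumes scikit-learn and SciPy, response matrices of shape participants x items, and an arbitrary loading cutoff of 0.4.

import numpy as np
from sklearn.decomposition import FactorAnalysis
from scipy.stats import pearsonr

def retained_items(responses, n_factors=1, min_loading=0.4):
    """Keep items whose strongest factor loading exceeds the cutoff."""
    fa = FactorAnalysis(n_components=n_factors).fit(responses)
    loadings = np.abs(fa.components_).max(axis=0)    # strongest loading per item
    return np.where(loadings >= min_loading)[0]      # indices of retained items

def test_retest_reliability(scores_t1, scores_t2):
    """Correlate total scale scores from two measurement occasions."""
    return pearsonr(scores_t1.sum(axis=1), scores_t2.sum(axis=1))

In practice, scale validation also involves expert review of item wording and construct-validity analyses, as the abstract describes; the sketch only covers the mechanical part.
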
The Impact of Multiple Parallel Phrase Suggestions on Email Input and Composition Behaviour of Native and Non-Native English Writers
Daniel Buschek (University of Bayreuth), Martin Zürn (LMU Munich), Malin Eiband (LMU Munich)
Abstract | Tags: Full Paper, Honorable Mention | Links:
@inproceedings{2021BuschekImpact,
title = {The Impact of Multiple Parallel Phrase Suggestions on Email Input and Composition Behaviour of Native and Non-Native English Writers},
author = {Daniel Buschek (University of Bayreuth) and Martin Zürn (LMU Munich) and Malin Eiband (LMU Munich)},
url = {https://www.hciai.uni-bayreuth.de/, Website - HCI+AI University of Bayreuth
https://twitter.com/DBuschek, Twitter - Daniel Buschek
https://www.youtube.com/watch?v=i-McZMd93fk, YouTube - Teaser Video
https://www.youtube.com/watch?v=NyDy3FtWpo0, YouTube - Video Figure
https://uxdesign.cc/how-can-ai-help-you-write-emails-eedd901bcdf2, Blogpost},
doi = {10.1145/3411764.3445372},
year = {2021},
date = {2021-05-01},
abstract = {We present an in-depth analysis of the impact of multi-word suggestion choices from a neural language model on user behaviour regarding input and text composition in email writing. Our study for the first time compares different numbers of parallel suggestions, and use by native and non-native English writers, to explore a trade-off of "efficiency vs ideation", emerging from recent literature. We built a text editor prototype with a neural language model (GPT-2), refined in a prestudy with 30 people. In an online study (N=156), people composed emails in four conditions (0/1/3/6 parallel suggestions). Our results reveal (1) benefits for ideation, and costs for efficiency, when suggesting multiple phrases; (2) that non-native speakers benefit more from more suggestions; and (3) further insights into behaviour patterns. We discuss implications for research, the design of interactive suggestion systems, and the vision of supporting writers with AI instead of replacing them.},
keywords = {Full Paper, Honorable Mention},
pubstate = {published},
tppubtype = {inproceedings}
}
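
For readers curious how parallel phrase suggestions of this kind can be produced, here is a minimal sketch. It assumes the Hugging Face transformers library and the public "gpt2" checkpoint and is not the study's actual prototype; the prototype's ranking and interface logic are not reproduced.

from transformers import GPT2LMHeadModel, GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")

def phrase_suggestions(draft, n_suggestions=3, max_new_tokens=8):
    """Return short, diverse continuations of the email text written so far."""
    inputs = tokenizer(draft, return_tensors="pt")
    outputs = model.generate(
        **inputs,
        do_sample=True,                      # sampling yields distinct parallel phrases
        top_p=0.9,
        max_new_tokens=max_new_tokens,
        num_return_sequences=n_suggestions,
        pad_token_id=tokenizer.eos_token_id,
    )
    new_tokens = outputs[:, inputs["input_ids"].shape[1]:]
    return [tokenizer.decode(t, skip_special_tokens=True) for t in new_tokens]

print(phrase_suggestions("Thank you for your email. I will", n_suggestions=3))

Varying n_suggestions between 0, 1, 3, and 6 would mirror the study's four conditions; the trade-off the paper reports is that more parallel phrases aid ideation but cost input efficiency.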
