@inproceedings{10.1007/978-3-032-05002-1_5,
  author    = {Aljuneidi, Saja and Heuten, Wilko and Wolters, Maria and Boll, Susanne},
  title     = {From Explaining to Engaging: The Effect of Interactive AI Explanations on Citizens’ Fairness and Adoption Perceptions},
  year      = {2025},
  pages     = {87–108},
  publisher = {Springer-Verlag},
  booktitle = {Human-Computer Interaction – INTERACT 2025: 20th IFIP TC 13 International Conference, Belo Horizonte, Brazil, September 8–12, 2025, Proceedings, Part II},
  doi       = {10.1007/978-3-032-05002-1_5},
  abstract  = {Integrating Artificial Intelligence (AI) into public administration decision-making requires clear explanations to ensure citizens maintain positive perceptions, particularly when AI systems make decisions that require discretion without human intervention. While existing research focuses on explanation content, the role of explanation interactivity in shaping citizens’ perceptions remains underexplored. This gap is especially critical in high-stakes contexts like child welfare, where there are several plausible legal decisions. Interactive explanations can help citizens better understand, question, and engage directly with the AI systems. This study investigates how interactive explanations influence citizens’ fairness perceptions and willingness to adopt AI systems in the high-stakes child welfare scenario. Through an online vignette survey (N = 562), we compared three levels of explanation interactivity: none (static), moderate (allowing citizens to reorder decision factors), and enhanced (allowing for a Q\&A session). Surprisingly, interactivity does not significantly affect citizens’ perception of fairness or willingness to adopt the AI system. Instead, explanation content and citizens’ prior attitudes toward AI play a more decisive role. This work contributes to understanding how interactive explanations influence public perceptions. It further underscores the role of pre-existing biases, individual differences, and attitudes in shaping how citizens perceive AI-based decisions in high-stakes public administration contexts.}
}