@inproceedings{aba2f2c2bede42aab3a2bc8bdc413e2a,
  title     = {Evaluating Real-Time Emotional Responses Using Bullet Screen Sentiment Analysis: Evidence from Electrodermal Activity},
  abstract  = {Bullet screens are attracting increasing attention as a way to express emotions and interact on short video platforms. Prior studies have used natural language processing (NLP) to analyze bullet screen sentiment in order to evaluate public opinion trends regarding a specific topic, movie, or product. However, few studies have investigated the effectiveness of using bullet screen sentiment analysis to predict real-time emotional responses. Thus, this study examined whether and to what extent bullet screen sentiment analysis can be used to evaluate and predict real-time emotional responses to videos by employing physiological electrodermal activity (EDA) measurements. A behavioral experiment was conducted in which eight college students wore a set of wireless galvanic skin sensors while watching three music videos (MVs) in random order. The participants{\textquoteright} EDA data, including skin conductance responses and peak amplitudes, were then analyzed. Meanwhile, the sentiments expressed in the bullet screen comments on the three MVs were analyzed using three dictionary-based sentiment analysis algorithms: SnowNLP, BosonNLP, and HelloNLP. The bullet screen sentiment analysis and physiological measurement results were then compared using descriptive and correlation analyses. The bullet screen sentiment parameters were found to significantly correlate with the EDA measurements. This study confirms the effectiveness of using bullet screen sentiment analysis to predict participants{\textquoteright} real-time emotional responses, providing a convenient and flexible way for enterprises and governments to detect public opinion trends and take action accordingly.},
  keywords  = {Bullet Screen, EDA, Emotional Response, Sentiment Analysis},
  author    = {Xu, Zhao and Li, Qingchuan and Song, Yao},
  note      = {This study was funded by the National Natural Science Foundation of China [grant number 62207008] and General Program of Stable Support Plan for Universities in Shenzhen [grant number GXWD20231129154726002]. Publisher Copyright: {\textcopyright} The Author(s), under exclusive license to Springer Nature Switzerland AG 2025.; 26th International Conference on Human-Computer Interaction, HCII 2024 ; Conference date: 29-06-2024 Through 04-07-2024},
  year      = {2025},
  month     = dec,
  day       = {16},
  doi       = {10.1007/978-3-031-76806-4_18},
  language  = {English},
  isbn      = {9783031768057},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer},
  address   = {Cham},
  pages     = {240--253},
  editor    = {Coman, Adela and Vasilache, Simona and {Fui-Hoon Nah}, Fiona and Siau, {Keng Leng} and Wei, June and Margetis, George},
  booktitle = {HCI International 2024 -- Late Breaking Papers},
  edition   = {First},
}