@inproceedings{bd5a16b899f04f10875eb8b096467ca1,
title = "User-centric affective video tagging from MEG and peripheral physiological responses",
abstract = "This paper presents a new multimodal database and the associated results for characterization of affect (valence, arousal and dominance) using magnetoencephalogram (MEG) brain signals and peripheral physiological signals (horizontal EOG, ECG, trapezius EMG). We attempt single-trial classification of affect in movie and music video clips, employing emotional responses extracted from eighteen participants. The main findings of this study are that: (i) the MEG signal effectively encodes affective viewer responses, (ii) clip arousal is better predicted by MEG, while peripheral physiological signals are more effective for predicting valence, and (iii) prediction performance is better for movie clips than for music video clips.",
keywords = "Affective video tagging, MEG, Movie vs. music clips, User-centric",
author = "Abadi, {Mojtaba Khomami} and Kia, {Seyed Mostafa} and Ramanathan Subramanian and Paolo Avesani and Nicu Sebe",
year = "2013",
doi = "10.1109/ACII.2013.102",
language = "English",
isbn = "9780769550480",
series = "Proceedings - 2013 Humaine Association Conference on Affective Computing and Intelligent Interaction, ACII 2013",
publisher = "Institute of Electrical and Electronics Engineers (IEEE)",
pages = "582--587",
editor = "Anton Nijholt and Maja Pantic and Sidney D'Mello",
booktitle = "Proceedings - 2013 Humaine Association Conference on Affective Computing and Intelligent Interaction, ACII 2013",
address = "United States",
note = "2013 5th Humaine Association Conference on Affective Computing and Intelligent Interaction, ACII 2013; Conference date: 02-09-2013 through 05-09-2013",
}