@article{erel2022icatcher,
  author = {Yotam Erel and Christine Potter and Sagi Jaffe-Dax and Casey Lew-Williams and Amit Bermano},
  title = {iCatcher: A neural network approach for automated coding of young children's eye movements},
  abstract = {
Infants' looking behaviors are often used for measuring attention, real-time processing, and learning, often using low-resolution videos. Despite the ubiquity of gaze-related methods in developmental science, current analysis techniques usually involve laborious post hoc coding, imprecise real-time coding, or expensive eye trackers that may increase data loss and require a calibration phase. As an alternative, we propose using computer vision methods to perform automatic gaze estimation from low-resolution videos. At the core of our approach is a neural network that classifies gaze directions in real time. We compared our method, called iCatcher, to manually annotated videos from a prior study in which infants looked at one of two pictures on a screen. We demonstrated that the accuracy of iCatcher approximates that of human annotators and that it replicates the prior study's results. Our method is publicly available as an open-source repository at https://github.com/yoterel/iCatcher.
  },
  year = {2022},
  journal = {Infancy},
  volume = {27},
  pages = {765--779},
  month = dec,
  issn = {1532-7078},
  doi = {10.1111/infa.12468},
  language = {eng},
}