@article {1410756, title = {The PAIR-R24M Dataset for Multi-animal 3D Pose Estimation}, journal = {NeurIPS}, year = {2021}, abstract = {

Understanding the biological basis of social and collective behaviors in animals is a key goal of the life sciences, and may yield important insights for engineering intelligent multi-agent systems. A critical step in understanding the mechanisms underlying social behaviors is a precise readout of the full 3D pose of interacting animals. While approaches for multi-animal pose estimation are beginning to emerge, they remain difficult to compare due to the lack of standardized benchmark datasets for multi-animal 3D pose estimation. Here we introduce the PAIR-R24M (Paired Acquisition of Interacting Rats) dataset for multi-animal 3D pose estimation, which contains 24.3 million frames of RGB video and 3D ground-truth motion capture of dyadic interactions in laboratory rats. PAIR-R24M contains data from 18 distinct pairs of rats engaged in diverse behaviors, recorded from 30 different viewpoints. The data are temporally contiguous and annotated with 11 behavioral categories and 3 interaction categories using a multi-animal extension of a recently developed behavioral segmentation approach. We used a novel multi-animal version of the recently published DANNCE network to establish a strong baseline for multi-animal 3D pose estimation without motion capture. These recordings are of sufficient resolution to examine cross-pair differences in social interactions and to identify conserved patterns of social interaction across rats.

}, url = {https://openreview.net/forum?id=-wVVl_UPr8}, author = {Marshall, Jesse D and Klibaite, Ugne and Gellis, Amanda and Aldarondo, Diego E and {\"O}lveczky, Bence P and Dunn, Tim} }