@article{Hebart2022.07.22.501123,
  author       = {Hebart, M. N. and Contier, O. and Teichmann, L. and Rockter, A. H. and Zheng, C. Y. and Kidder, A. and Corriveau, A. and Vaziri-Pashkam, M. and Baker, C. I.},
  title        = {{THINGS}-data: A Multimodal Collection of Large-Scale Datasets for Investigating Object Representations in Brain and Behavior},
  journal      = {bioRxiv},
  elocation-id = {2022.07.22.501123},
  year         = {2022},
  doi          = {10.1101/2022.07.22.501123},
  publisher    = {Cold Spring Harbor Laboratory},
  abstract     = {Understanding object representations requires a broad, comprehensive sampling of the objects in our visual world with dense measurements of brain activity and behavior. Here we present THINGS-data, a multimodal collection of large-scale datasets comprising functional MRI, magnetoencephalographic recordings, and 4.70 million similarity judgments in response to thousands of photographic images for up to 1,854 object concepts. THINGS-data is unique in its breadth of richly-annotated objects, allowing for testing countless hypotheses at scale while assessing the reproducibility of previous findings. Beyond the unique insights promised by each individual dataset, the multimodality of THINGS-data allows combining datasets for a much broader view into object processing than previously possible. Our analyses demonstrate the high quality of the datasets and provide five examples of hypothesis-driven and data-driven applications. THINGS-data constitutes the core release of the THINGS initiative (https://things-initiative.org) for bridging the gap between disciplines and the advancement of cognitive neuroscience. Competing Interest Statement: The authors have declared no competing interest.},
  url          = {https://www.biorxiv.org/content/early/2022/07/23/2022.07.22.501123},
  eprint       = {https://www.biorxiv.org/content/early/2022/07/23/2022.07.22.501123.full.pdf},
}