@article{Laschowski2021.04.02.438126,
  author = {Brokoslaw Laschowski and William McNally and Alexander Wong and John McPhee},
  title = {Computer Vision and Deep Learning for Environment-Adaptive Control of Robotic Lower-Limb Exoskeletons},
  elocation-id = {2021.04.02.438126},
  year = {2021},
  doi = {10.1101/2021.04.02.438126},
  publisher = {Cold Spring Harbor Laboratory},
  abstract = {Robotic exoskeletons require human control and decision making to switch between different locomotion modes, which can be inconvenient and cognitively demanding. To support the development of automated locomotion mode recognition systems (i.e., high-level controllers), we designed an environment recognition system using computer vision and deep learning. We collected over 5.6 million images of indoor and outdoor real-world walking environments using a wearable camera system, of which ~923,000 images were annotated using a 12-class hierarchical labelling architecture (called the ExoNet database). We then trained and tested the EfficientNetB0 convolutional neural network, designed for efficiency using neural architecture search, to predict the different walking environments. Our environment recognition system achieved ~73\% image classification accuracy. While these preliminary results benchmark EfficientNetB0 on the ExoNet database, further research is needed to compare different image classification algorithms to develop an accurate and real-time environment-adaptive locomotion mode recognition system for robotic exoskeleton control.},
  URL = {https://www.biorxiv.org/content/early/2021/04/04/2021.04.02.438126},
  eprint = {https://www.biorxiv.org/content/early/2021/04/04/2021.04.02.438126.full.pdf},
  journal = {bioRxiv}
}