@article{roy_zheng_kamper_hu_2023,
  title={Concurrent and Continuous Prediction of Finger Kinetics and Kinematics via Motoneuron Activities},
  volume={70},
  ISSN={1558-2531},
  DOI={10.1109/TBME.2022.3232067},
  abstractNote={Objective: Robust neural decoding of intended motor output is crucial to enable intuitive control of assistive devices, such as robotic hands, to perform daily tasks. Few existing neural decoders can predict kinetic and kinematic variables simultaneously. The current study developed a continuous neural decoding approach that can concurrently predict fingertip forces and joint angles of multiple fingers. Methods: We obtained motoneuron firing activities by decomposing high-density electromyogram (HD EMG) signals of the extrinsic finger muscles. The identified motoneurons were first grouped and then refined specific to each finger (index or middle) and task (finger force and dynamic movement) combination. The refined motoneuron groups (separation matrices) were then applied directly in real time to new EMG data involving both finger force and dynamic movement tasks produced by both fingers. EMG-amplitude-based prediction was also performed as a comparison. Results: We found that the newly developed decoding approach outperformed the EMG-amplitude method for both finger force and joint angle estimation, with a lower prediction error (Force: 3.47±0.43 vs 6.64±0.69% MVC; Joint Angle: 5.40±0.50° vs 12.8±0.65°) and a higher correlation (Force: 0.75±0.02 vs 0.66±0.05; Joint Angle: 0.94±0.01 vs 0.5±0.05) between the estimated and recorded motor output. The performance was also consistent for both fingers. Conclusion: The developed neural decoding algorithm allowed us to accurately and concurrently predict finger forces and joint angles of multiple fingers in real time. Significance: Our approach can enable intuitive interactions with assistive robotic hands and allow the performance of dexterous hand skills involving both force control tasks and dynamic movement control tasks.},
  number={6},
  journal={IEEE TRANSACTIONS ON BIOMEDICAL ENGINEERING},
  author={Roy, Rinku and Zheng, Yang and Kamper, Derek G. and Hu, Xiaogang},
  year={2023},
  month={Jun},
  pages={1911–1920}
}

@article{roy_kamper_hu_2023,
  title={Optimized Model Selection for Concurrent Decoding of Finger Kinetics and Kinematics},
  volume={11},
  ISSN={2169-3536},
  DOI={10.1109/ACCESS.2023.3246950},
  abstractNote={Myoelectric-based motor intent detection is typically used to interface with assistive devices. However, the intent detection performance is sensitive to interference in electromyogram (EMG) signals. Recently, EMG signals have been decomposed into motor unit (MU) firing activities, and the binary firing activities of individual motoneurons can be used to predict motor output in a continuous manner. Different functions that map MU firings to motor output have been implemented, using either composite MU firing frequency or individual MU firing frequencies, and it is unclear whether one mapping function outperforms the others. Accordingly, we evaluated three MU-based finger kinetic and kinematic prediction models by varying the number of MUs and the method of including MU firings in the regression model. We also compared the performance of three EMG-amplitude-based models with a varying number of channels. We performed MU decomposition in advance for real-time implementation. Our results showed that the individual firing frequencies of five MUs provided the lowest estimation error (force: 4.66±0.36 %MVC; joint angle: 4.81±0.49°) and the highest correlation (force: 0.86±0.01; joint angle: 0.93±0.01) with the measured motor outputs, compared with mapping methods using the population firing frequency of all MUs or of a group of MUs with similar firing activities. The results indicated that firing information at the population level may mask critical information in individual MU firings. These findings allowed us to identify the optimal models for concurrent and continuous finger force and joint angle estimation. The combination of minimal complexity and high accuracy makes these models suitable for real-time control of assistive robotic devices.},
  journal={IEEE ACCESS},
  author={Roy, Rinku and Kamper, Derek G. and Hu, Xiaogang},
  year={2023},
  pages={17348–17358}
}