@inproceedings{rahat-multimodal,
  author    = {Mahmud, S. and Lin, X. and Kim, J. and Iqbal, H. and Rahat-Uz-Zaman, M. and Reza, S. and Rahman, M. A.},
  title     = {A Multi-Modal Human Machine Interface for Controlling a Smart Wheelchair},
  booktitle = {2019 {IEEE} 7th Conference on Systems, Process and Control ({ICSPC})},
  year      = {2019},
  month     = dec,
  pages     = {10--13},
  doi       = {10.1109/ICSPC47137.2019.9068027},
  abstract  = {As the number of disabled people all over the world is increasing very fast, the role of an electric wheelchair is becoming crucial to improve the mobility for them. Independent mobility is a vital aspect of self-respect and plays an important role in the life of a disabled person. The smart wheelchair is an endeavor to provide an self-supporting mobility to those people who are not able to move freely. Typical electric powered wheelchairs are usually controlled by the traditional joysticks which cannot fulfill the needs of a person who has motor disabilities and some specific types of disabilities like paralysis who can only move their eyes. This paper aims to develop a multi-modal human machine interface for the larger domain of disabled persons to control the wheelchair efficiently. The interface comprises joystick, smart hand-glove, head movement tracker and eye tracker. The system presented in this paper can support a wide variety of users with different types of disabilities.},
  keywords  = {control engineering computing;handicapped aids;human computer interaction;interactive devices;wheelchairs;multimodal human machine interface;smart wheelchair;disabled people;electric wheelchair;independent mobility;vital aspect;disabled person;typical electric powered wheelchairs;motor disabilities;smart hand-glove;Wheelchairs;Tracking;Magnetic heads;Head;Cameras;Control systems;Prototypes;Eye Gaze;Hand Gesture;Head Movement;IMU;Flex Sensor},
}