@article {2151, title = {Decoding vowels and consonants in spoken and imagined words using electrocorticographic signals in humans.}, journal = {J Neural Eng}, volume = {8}, year = {2011}, month = {08/2011}, pages = {046028}, abstract = {Several stories in the popular media have speculated that it may be possible to infer from the brain which word a person is speaking or even thinking. While recent studies have demonstrated that brain signals can give detailed information about actual and imagined actions, such as different types of limb movements or spoken words, concrete experimental evidence for the possibility to {\textquoteleft}read the mind{\textquoteright}, i.e. to interpret internally-generated speech, has been scarce. In this study, we found that it is possible to use signals recorded from the surface of the brain (electrocorticography) to discriminate the vowels and consonants embedded in spoken and in imagined words, and we defined the cortical areas that held the most information about discrimination of vowels and consonants. The results shed light on the distinct mechanisms associated with production of vowels and consonants, and could provide the basis for brain-based communication using imagined speech.
}, keywords = {Adolescent, Adult, Brain, Brain Mapping, Cerebral Cortex, Communication Aids for Disabled, Data Interpretation, Statistical, Discrimination (Psychology), Electrodes, Implanted, Electroencephalography, Epilepsy, Female, Functional Laterality, Humans, Male, Middle Aged, Movement, Speech Perception, User-Computer Interface}, issn = {1741-2552}, doi = {10.1088/1741-2560/8/4/046028}, url = {http://www.ncbi.nlm.nih.gov/pubmed/21750369}, author = {Pei, Xiao-Mei and Barbour, Dennis L and Leuthardt, E C and Schalk, Gerwin} }