@article{4495,
  author   = {Norton, James J. S. and DiRisio, Grace F. and Carp, Jonathan S. and Norton, Amanda E. and Kochan, Nicholas S. and Wolpaw, Jonathan R.},
  title    = {Brain-Computer Interface-Based Assessment of Color Vision},
  journal  = {Journal of Neural Engineering},
  volume   = {18},
  year     = {2021},
  month    = nov,
  issn     = {1741-2552},
  doi      = {10.1088/1741-2552/ac3264},
  keywords = {brain-computer interfaces, Color Vision, Electroencephalography, Evoked Potentials, Visual, Humans, Light, Photic Stimulation, Research Design},
  abstract = {Present methods for assessing color vision require the person{\textquoteright}s active participation. Here we describe a brain-computer interface-based method for assessing color vision that does not require the person{\textquoteright}s participation. This method uses steady-state visual evoked potentials to identify metamers---two light sources that have different spectral distributions but appear to the person to be the same color. We demonstrate that: minimization of the visual evoked potential elicited by two flickering light sources identifies the metamer; this approach can distinguish people with color-vision deficits from those with normal color vision; and this metamer-identification process can be automated. This new method has numerous potential clinical, scientific, and industrial applications.},
}

@article{4362,
  author   = {Habibzadeh, Hadi and Norton, James J. S. and Vaughan, Theresa M. and Soyata, Tolga and Zois, Daphney-Stavroula},
  title    = {A Voting-Enhanced Dynamic-Window-Length Classifier for {SSVEP}-Based {BCIs}},
  journal  = {IEEE Transactions on Neural Systems and Rehabilitation Engineering},
  volume   = {29},
  year     = {2021},
  pages    = {1766--1773},
  issn     = {1558-0210},
  doi      = {10.1109/TNSRE.2021.3106876},
  keywords = {Algorithms, brain-computer interfaces, Electroencephalography, Evoked Potentials, Visual, Humans, Photic Stimulation},
  abstract = {We present a dynamic window-length classifier for steady-state visual evoked potential (SSVEP)-based brain-computer interfaces (BCIs) that does not require the user to choose a feature extraction method or channel set. Instead, the classifier uses multiple feature extraction methods and channel selections to infer the SSVEP and relies on majority voting to pick the most likely target. The classifier extends the window length dynamically if no target obtains the majority of votes. Compared with existing solutions, our classifier: (i) does not assume that any single feature extraction method will consistently outperform the others; (ii) adapts the channel selection to individual users or tasks; (iii) uses dynamic window lengths; (iv) is unsupervised (i.e., does not need training). Collectively, these characteristics make the classifier easy-to-use, especially for caregivers and others with limited technical expertise. We evaluated the performance of our classifier on a publicly available benchmark dataset from 35 healthy participants. We compared the information transfer rate (ITR) of this new classifier to those of the minimum energy combination (MEC), maximum synchronization index (MSI), and filter bank canonical correlation analysis (FBCCA). The new classifier increases average ITR to 123.5 bits-per-minute (bpm), 47.5, 51.2, and 19.5 bpm greater than the MEC, MSI, and FBCCA classifiers, respectively.},
}

@article{2133,
  author   = {Martens, S. M. M. and Mooij, J. M. and Hill, Jeremy and Farquhar, Jason and Sch{\"o}lkopf, Bernhard},
  title    = {A Graphical Model Framework for Decoding in the Visual {ERP}-Based {BCI} Speller},
  journal  = {Neural Computation},
  volume   = {23},
  year     = {2011},
  month    = jan,
  pages    = {160--182},
  issn     = {1530-888X},
  doi      = {10.1162/NECO_a_00066},
  url      = {http://www.ncbi.nlm.nih.gov/pubmed/20964540},
  keywords = {Artificial Intelligence, Computer User Training, Discrimination Learning, Electroencephalography, Evoked Potentials, Evoked Potentials, Visual, Humans, Language, Models, Neurological, Models, Theoretical, Reading, Signal Processing, Computer-Assisted, User-Computer Interface, Visual Cortex, Visual Perception},
  abstract = {We present a graphical model framework for decoding in the visual ERP-based speller system. The proposed framework allows researchers to build generative models from which the decoding rules are obtained in a straightforward manner. We suggest two models for generating brain signals conditioned on the stimulus events. Both models incorporate letter frequency information but assume different dependencies between brain signals and stimulus events. For both models, we derive decoding rules and perform a discriminative training. We show on real visual speller data how decoding performance improves by incorporating letter frequency information and using a more realistic graphical model for the dependencies between the brain signals and the stimulus events. Furthermore, we discuss how the standard approach to decoding can be seen as a special case of the graphical model framework. The letter also gives more insight into the discriminative approach for decoding in the visual speller system.},
}

@inproceedings{2241,
  author    = {Miller, John W. and Hermes, Dora and Schalk, Gerwin and Ramsey, Nick F. and Jagadeesh, Bharathi and den Nijs, Marcel and Ojemann, J. G. and Rao, Rajesh P. N.},
  title     = {Detection of Spontaneous Class-Specific Visual Stimuli with High Temporal Accuracy in Human Electrocorticography},
  booktitle = {Annual International Conference of the {IEEE} Engineering in Medicine and Biology Society},
  volume    = {2009},
  year      = {2009},
  pages     = {6465--6468},
  issn      = {1557-170X},
  doi       = {10.1109/IEMBS.2009.5333546},
  keywords  = {Algorithms, Electrocardiography, Evoked Potentials, Visual, Humans, Male, Pattern Recognition, Automated, Pattern Recognition, Visual, Photic Stimulation, Reproducibility of Results, Sensitivity and Specificity, User-Computer Interface, Visual Cortex},
  abstract  = {Most brain-computer interface classification experiments from electrical potential recordings have been focused on the identification of classes of stimuli or behavior where the timing of experimental parameters is known or pre-designated. Real world experience, however, is spontaneous, and to this end we describe an experiment predicting the occurrence, timing, and types of visual stimuli perceived by a human subject from electrocorticographic recordings. All 300 of 300 presented stimuli were correctly detected, with a temporal precision of order 20 ms. The type of stimulus (face/house) was correctly identified in 95\% of these cases. There were approximately 20 false alarm events, corresponding to a late 2nd neuronal response to a previously identified event.},
}

@article{2184,
  author   = {Allison, Brendan Z. and McFarland, Dennis J. and Schalk, Gerwin and Zheng, Shi Dong and Moore-Jackson, Melody and Wolpaw, Jonathan},
  title    = {Towards an Independent Brain-Computer Interface Using Steady State Visual Evoked Potentials},
  journal  = {Clinical Neurophysiology},
  volume   = {119},
  year     = {2008},
  month    = feb,
  pages    = {399--408},
  issn     = {1388-2457},
  doi      = {10.1016/j.clinph.2007.09.121},
  url      = {http://www.ncbi.nlm.nih.gov/pubmed/18077208},
  keywords = {Adolescent, Adult, Attention, Brain, Brain Mapping, Dose-Response Relationship, Radiation, Electroencephalography, Evoked Potentials, Visual, Female, Humans, Male, Pattern Recognition, Visual, Photic Stimulation, Spectrum Analysis, User-Computer Interface},
  abstract = {OBJECTIVE: Brain-computer interface (BCI) systems using steady state visual evoked potentials (SSVEPs) have allowed healthy subjects to communicate. However, these systems may not work in severely disabled users because they may depend on gaze shifting. This study evaluates the hypothesis that overlapping stimuli can evoke changes in SSVEP activity sufficient to control a BCI. This would provide evidence that SSVEP BCIs could be used without shifting gaze.
METHODS: Subjects viewed a display containing two images that each oscillated at a different frequency. Different conditions used overlapping or non-overlapping images to explore dependence on gaze function. Subjects were asked to direct attention to one or the other of these images during each of 12 one-minute runs.
RESULTS: Half of the subjects produced differences in SSVEP activity elicited by overlapping stimuli that could support BCI control. In all remaining users, differences did exist at corresponding frequencies but were not strong enough to allow effective control.
CONCLUSIONS: The data demonstrate that SSVEP differences sufficient for BCI control may be elicited by selective attention to one of two overlapping stimuli. Thus, some SSVEP-based BCI approaches may not depend on gaze control. The nature and extent of any BCI{\textquoteright}s dependence on muscle activity is a function of many factors, including the display, task, environment, and user.
SIGNIFICANCE: SSVEP BCIs might function in severely disabled users unable to reliably control gaze. Further research with these users is necessary to explore the optimal parameters of such a system and validate online performance in a home environment.},
}

@article{2165,
  author   = {Wolpaw, Jonathan and McFarland, Dennis J. and Vaughan, Theresa M. and Schalk, Gerwin},
  title    = {The {Wadsworth} Center Brain-Computer Interface ({BCI}) Research and Development Program},
  journal  = {IEEE Transactions on Neural Systems and Rehabilitation Engineering},
  volume   = {11},
  year     = {2003},
  month    = jun,
  pages    = {204--207},
  issn     = {1534-4320},
  doi      = {10.1109/TNSRE.2003.814442},
  url      = {http://www.ncbi.nlm.nih.gov/pubmed/12899275},
  keywords = {Academic Medical Centers, Adult, Algorithms, Artifacts, Brain, Brain Mapping, Electroencephalography, Evoked Potentials, Visual, Feedback, Humans, Middle Aged, Nervous System Diseases, Research, Research Design, User-Computer Interface, Visual Perception},
  abstract = {Brain-computer interface (BCI) research at the Wadsworth Center has focused primarily on using electroencephalogram (EEG) rhythms recorded from the scalp over sensorimotor cortex to control cursor movement in one or two dimensions. Recent and current studies seek to improve the speed and accuracy of this control by improving the selection of signal features and their translation into device commands, by incorporating additional signal features, and by optimizing the adaptive interaction between the user and system. In addition, to facilitate the evaluation, comparison, and combination of alternative BCI methods, we have developed a general-purpose BCI system called BCI-2000 and have made it available to other research groups. Finally, in collaboration with several other groups, we are developing simple BCI applications and are testing their practicality and long-term value for people with severe motor disabilities.},
}