Microsaccades are small, involuntary eye movements essential for visual perception and neural processing. Traditional microsaccade research often relies on eye trackers and frame-based video analysis. While eye trackers offer high precision, they can be expensive and have limitations in scalability and temporal resolution. In contrast, event-based sensing offers a more efficient and precise alternative, capturing high-resolution spatial and temporal information with minimal latency. This work introduces a pioneering event-based microsaccade dataset, simulating angular displacements from 0.5° to 2.0° using Blender and v2e. We evaluate the dataset with Spiking-VGG11, Spiking-VGG13, Spiking-VGG16, and a novel Spiking-VGG16Flow architecture, all implemented in SpikingJelly. The models achieve approximately 90% accuracy, establishing a strong benchmark for event-based eye movement research.
@dataset{microsaccade_dataset_2025,
  title     = {Microsaccade Recognition with Event Cameras: A Novel Dataset},
  author    = {Shariff, Waseem and Hanley, Timothy and Stec, Maciej and Javidnia, Hossein and Corcoran, Peter},
  year      = {2025},
  doi       = {10.57967/hf/6965},
  publisher = {Hugging Face},
  note      = {Presented at {BMVC} 2025},
}
@inproceedings{Shariff_2025_BMVC,
  author    = {Shariff, Waseem and Hanley, Timothy and Stec, Maciej and Javidnia, Hossein and Corcoran, Peter},
  title     = {Benchmarking Microsaccade Recognition with Event Cameras: A Novel Dataset and Evaluation},
  booktitle = {36th British Machine Vision Conference 2025, {BMVC} 2025, Sheffield, UK, November 24-27, 2025},
  publisher = {BMVA},
  year      = {2025},
  url       = {https://bmva-archive.org.uk/bmvc/2025/assets/papers/Paper_288/paper.pdf},
}
@misc{microsaccade_benchmarking_2025,
  title         = {Benchmarking Microsaccade Recognition with Event Cameras: A Novel Dataset and Evaluation},
  author        = {Shariff, Waseem and Hanley, Timothy and Stec, Maciej and Javidnia, Hossein and Corcoran, Peter},
  year          = {2025},
  eprint        = {2510.24231},
  archiveprefix = {arXiv},
  url           = {https://arxiv.org/abs/2510.24231},
  internal-note = {arXiv preprint of the BMVC 2025 paper; cite the published version where possible},
}