@inproceedings{40de003b720f4f2ab4919937952c9d73,
title = "Performance Analysis of Synthetic Events via Visual Object Trackers",
abstract = "This study investigated the effectiveness of synthetic events in enhancing the accuracy of visual object trackers, particularly in scenarios where conventional RGB data struggles, such as rapidly moving objects, motion blur, and varying lighting conditions, highlighting the potential of event cameras for tracking applications. Synthetic events were generated from RGB Visual Object Tracking (VOT) datasets using the v2e toolbox and post-processed in the iniVation Dynamic Vision (DV) software. The post-processed data was subsequently fused with traditional RGB data, and evaluation was conducted with the PyTracking library to measure potential tracking improvements. The results showed a notable increase in tracking efficacy when post-processed synthetic events were integrated with RGB data. In conclusion, synthetically generated events can augment current state-of-the-art (SOTA) VOT frameworks with minimal Neural Network (NN) adjustments.",
keywords = "Event cameras, Fusion technique, Neuromorphic vision, Visual object tracking (VOT)",
author = "Mohamad Alansari and Hamad AlRemeithi and Sara Alansari and Naoufel Werghi and Sajid Javed",
note = "Publisher Copyright: {\textcopyright} The Author(s), under exclusive license to Springer Nature Switzerland AG 2024; Science and Information Conference, SAI 2024; Conference date: 11-07-2024 through 12-07-2024",
year = "2024",
doi = "10.1007/978-3-031-62269-4_26",
language = "British English",
isbn = "9783031622687",
series = "Lecture Notes in Networks and Systems",
publisher = "Springer Science and Business Media Deutschland GmbH",
pages = "364--384",
editor = "Kohei Arai",
booktitle = "Intelligent Computing - Proceedings of the 2024 Computing Conference",
address = "Germany",
}