@inproceedings{b7abe822f983415cbbb55e3821e076ef,
  title     = {Greedy Incremental Support Vector Regression},
  abstract  = {Support Vector Regression (SVR) is a powerful supervised machine learning model especially well suited to the normalized or binarized data. However, its quadratic complexity in the number of training examples eliminates it from training on large datasets, especially high dimensional with frequent retraining requirement. We propose a simple two-stage greedy selection of training data for SVR to maximize its validation set accuracy at the minimum number of training examples and illustrate the performance of such strategy in the context of Clash Royale Challenge 2019, concerned with efficient decks' win rate prediction. Hundreds of thousands of labelled data examples were reduced to hundreds, optimized SVR was trained on to maximize the validation R2 score. The proposed model scored the first place in the Clash Royale 2019 challenge, outperforming over a hundred competitive teams from around the world.},
  keywords  = {Data editing, Greedy backward-forward search, Hyperparameters optimization, Support vector regression},
  author    = {Ruta, Dymitr and Cen, Ling and Vu, Quang Hieu},
  note      = {Publisher Copyright: {\textcopyright} 2019 Polish Information Processing Society - as since.; 2019 Federated Conference on Computer Science and Information Systems, FedCSIS 2019 ; Conference date: 01-09-2019 Through 04-09-2019},
  year      = {2019},
  month     = sep,
  doi       = {10.15439/2019F364},
  language  = {British English},
  series    = {Proceedings of the 2019 Federated Conference on Computer Science and Information Systems, {FedCSIS} 2019},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {7--9},
  editor    = {Ganzha, Maria and Maciaszek, Leszek and Paprzycki, Marcin},
  booktitle = {Proceedings of the 2019 Federated Conference on Computer Science and Information Systems, {FedCSIS} 2019},
  address   = {United States},
}