@inproceedings{0a747b066259412facb68c8f594a4363,
  title     = {Hyper-parameter Tuning for Progressive Learning and its Application to Network Cyber Security},
  abstract  = {The long-term deployment of data-driven AI technology using artificial neural networks (ANNs) should be scalable and maintainable when new data becomes available. To insure smooth adaptation, the learning must be cumulative so that the network consumes new data without compromising its inference performance based on past data. Such incremental accumulation of learning experience is known as progressive learning. In this paper, we address the open problem of tuning the hyperparameters of neural networks during progressive learning. A hyper-parameter optimization framework is proposed that selects the best hyper-parameter values on a task-by-task basis. The neural network model adapts to each progressive learning task by adjusting the hyper-parameters under which the neural architecture is incrementally grown. Several hyper-parameter search strategies are explored and compared in support of progressive learning. In contrast to the predominant practice of using imaging datasets in machine learning, we have used cybersecurity datasets to illustrate the advantages of the proposed hyper-parameter tuning algorithms.},
  author    = {Karn, Rupesh Raj and Ziegler, Matthew and Jung, Jinwook and Elfadel, Ibrahim Abe M.},
  note      = {Publisher Copyright: {\textcopyright} 2022 IEEE.; 2022 IEEE International Symposium on Circuits and Systems, ISCAS 2022 ; Conference date: 27-05-2022 Through 01-06-2022},
  year      = {2022},
  doi       = {10.1109/ISCAS48785.2022.9937433},
  language  = {British English},
  series    = {Proceedings - IEEE International Symposium on Circuits and Systems},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {1220--1224},
  booktitle = {{IEEE} International Symposium on Circuits and Systems, {ISCAS} 2022},
  address   = {United States},
}