@inproceedings{f6a9fd3ea9f748adb447d7415bfdfff9,
title = "Incremental ELMVIS for Unsupervised Learning",
abstract = "An incremental version of the ELMVIS+ method is proposed in this paper. It iteratively selects a few best-fitting data samples from a large pool and adds them to the model. The method retains the high speed of ELMVIS+ while allowing for much larger sample pools due to lower memory requirements. The extension is useful for reaching a better local optimum with the greedy optimization of ELMVIS, and the data structure can be specified in semi-supervised optimization. The major new application of incremental ELMVIS is not visualization but general dataset processing. The method is capable of learning dependencies from non-organized unsupervised data, either reconstructing a shuffled dataset or learning dependencies in a complex high-dimensional space. The results are interesting and promising, although there is room for improvement.",
keywords = "engineering, artificial intelligence, computational intelligence",
author = "Anton Akusok and Emil Eirola and Yoan Miche and Ian Oliver and Kaj-Mikael Bj{\"o}rk and Andrey Gritsenko and Stephen Baek and Amaury Lendasse",
year = "2018",
doi = "10.1007/978-3-319-57421-9_15",
language = "English",
isbn = "978-3-319-57421-9",
series = "Proceedings in Adaptation, Learning and Optimization",
publisher = "Springer",
pages = "183--193",
editor = "Jiuwen Cao and Erik Cambria and Amaury Lendasse and Yoan Miche and {Chi Man} Vong",
booktitle = "Proceedings of ELM-2016",
address = "Germany",
note = "International Conference on Extreme Learning Machines (ELM); Conference date: 13-12-2016 through 15-12-2016",
}