@inproceedings{3830fa7c55d44ad5bb75a2bc69c7f673,
title = "Least Square Support Vector Machine for Large Scale Dataset",
abstract = "Support Vector Machine (SVM) is a very well-known tool for classification and regression problems. Many applications require SVMs with non-linear kernels for accurate classification. Training time complexity for SVMs with non-linear kernels is typically quadratic in the size of the training dataset. In this paper, we depart from the very well-known variation of SVM, the so-called Least Square Support Vector Machine, and apply Steepest Sub-gradient Descent method to propose Steepest Sub-gradient Descent Least Square Support Vector Machine (SGD-LSSVM). It is theoretically proven that the convergent rate of the proposed method to gain ε-precision solution is O(log(1/ε)). The experiments established on the large-scale datasets indicate that the proposed method offers the comparable classification accuracies while being faster than the baselines.",
keywords = "Support Vector Machine, kernel method, solver, steepest gradient descent",
author = "Nguyen, Khanh and Le, Trung and Lai, Vinh and Nguyen, Duy and Tran, Dat and Ma, Wanli",
year = "2015",
doi = "10.1109/ijcnn.2015.7280575",
language = "English",
isbn = "9781479919611",
volume = "1",
series = "Proceedings of the International Joint Conference on Neural Networks",
publisher = "IEEE, Institute of Electrical and Electronics Engineers",
pages = "2057--2065",
editor = "Amir Hussain",
booktitle = "2015 International Joint Conference on Neural Networks, IJCNN 2015",
address = "United States",
note = "2015 International Joint Conference on Neural Networks, IJCNN 2015 ; Conference date: 12-07-2015 Through 17-07-2015",
}