@article{e8ad6e69427d4e65badfa2d24e4a7fbd,
title = "Updating kernel methods in spectral decomposition by affinity perturbations",
abstract = "Many machine-learning-based algorithms contain a training step that is performed once. The training step is usually computationally expensive since it involves processing huge matrices. If the training profile is extracted from an evolving, dynamic dataset, it has to be updated as features of the training dataset change. This paper proposes a solution for updating this profile efficiently. Specifically, we investigate how to update the training profile when the data is constantly evolving. We assume that the data is modeled by a kernel method and processed by a spectral decomposition. In many algorithms for clustering and classification, a low-dimensional representation of the affinity (kernel) graph of the embedded training dataset is computed and then used to classify newly arrived data points. We present methods for updating such embeddings of the training datasets incrementally, without repeating the entire computation when a small number of the training samples change. Efficient computation of such an algorithm is critical in many web-based applications.",
keywords = "Diffusion Maps, Dimensionality reduction, Eigenvalue problem, Perturbation theory",
author = "Yaniv Shmueli and Guy Wolf and Amir Averbuch",
note = "Funding Information: This research was supported by the Israel Science Foundation (Grant No. 1041/10) and Eshkol Fellowship from the Israeli Ministry of Science & Technology.",
year = "2012",
month = sep,
day = "15",
doi = "10.1016/j.laa.2012.04.035",
language = "English",
volume = "437",
pages = "1356--1365",
journal = "Linear Algebra and Its Applications",
issn = "0024-3795",
publisher = "Elsevier Inc.",
number = "6",
}
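
The abstract above describes updating the spectral decomposition of an affinity (kernel) matrix via perturbation theory when a small number of training samples change. The following Python sketch is only a rough illustration of that idea: it applies the classical first-order eigenvalue/eigenvector perturbation formulas to a Gaussian-kernel affinity matrix. The function name, the gap_tol guard, and the toy dataset are assumptions for illustration and are not the paper's actual algorithm or diffusion-maps construction.

import numpy as np

def update_eigenpairs(eigvals, eigvecs, delta_A, gap_tol=1e-12):
    """First-order update of the eigen-decomposition of a symmetric affinity
    matrix A after a small symmetric perturbation delta_A (A_new = A + delta_A).

    eigvals : (n,) eigenvalues of A
    eigvecs : (n, n) columns are the matching (orthonormal) eigenvectors
    """
    # Perturbation expressed in the old eigenbasis: C[j, i] = v_j^T dA v_i
    C = eigvecs.T @ delta_A @ eigvecs

    # lambda_i' ~= lambda_i + v_i^T dA v_i
    new_vals = eigvals + np.diag(C)

    # v_i' ~= v_i + sum_{j != i} C[j, i] / (lambda_i - lambda_j) * v_j
    gaps = eigvals[None, :] - eigvals[:, None]               # gaps[j, i] = lambda_i - lambda_j
    safe = np.where(np.abs(gaps) < gap_tol, np.inf, gaps)    # drop near-degenerate terms
    coeff = C / safe
    np.fill_diagonal(coeff, 0.0)
    new_vecs = eigvecs + eigvecs @ coeff

    # Re-normalize columns so the updated vectors stay approximately unit length
    new_vecs /= np.linalg.norm(new_vecs, axis=0, keepdims=True)
    return new_vals, new_vecs


if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = rng.standard_normal((200, 3))

    # Gaussian (heat) kernel affinities, a common choice in diffusion maps
    D2 = ((X[:, None, :] - X[None, :, :]) ** 2).sum(-1)
    A = np.exp(-D2 / D2.mean())
    vals, vecs = np.linalg.eigh(A)

    # Perturb the affinities of a handful of samples and update incrementally
    dA = np.zeros_like(A)
    idx = rng.choice(200, size=5, replace=False)
    dA[idx, :] = 1e-3 * rng.standard_normal((5, 200))
    dA = 0.5 * (dA + dA.T)                                   # keep the perturbation symmetric

    new_vals, new_vecs = update_eigenpairs(vals, vecs, dA)
    exact_vals = np.linalg.eigvalsh(A + dA)
    print("max eigenvalue error:", np.abs(new_vals - exact_vals).max())

The sketch trades exactness for speed: recomputing the full eigen-decomposition costs O(n^3), while the first-order update above is dominated by a few matrix products and remains accurate only while the perturbation is small relative to the spectral gaps, which matches the small-change regime the abstract targets.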