@inproceedings{187b39f845e1463eabca006aaa7cc90d,
title = "L0-Constrained Regression for Data Mining",
abstract = "L2 and L1 constrained regression methods, such as ridge regression and Lasso, have been generally known for their fitting ability. Recently, L0-constrained classifications have been used for feature selection and classifier construction. This paper proposes an L0 constrained regression method, which aims to minimize both the epsilon-insensitive fitting errors and L0 constraints on regression coefficients. Our L0-constrained regression can be efficiently approximated by successive linearization algorithm, and shows the favorable properties of selecting a compact set of fitting coefficients and tolerating small fitting errors. To make our L0 constrained regression generally applicable, the extension to nonlinear regression is also addressed in this paper.",
keywords = "Feature Selection, Ordinary Little Square, Nonlinear Regression, Support Vector Regression, Ridge Regression",
author = "Zhili Wu and Li, {Chun Hung}",
note = "Publisher copyright: {\textcopyright} Springer-Verlag Berlin Heidelberg 2007; 11th Pacific-Asia Conference on Knowledge Discovery and Data Mining, PAKDD 2007 ; Conference date: 22-05-2007 Through 25-05-2007",
year = "2007",
month = apr,
day = "27",
doi = "10.1007/978-3-540-71701-0_110",
language = "English",
isbn = "9783540717003",
series = "Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)",
publisher = "Springer Verlag",
pages = "981--988",
editor = "Zhi-Hua Zhou and Hang Li and Qiang Yang",
booktitle = "Advances in Knowledge Discovery and Data Mining - 11th Pacific-Asia Conference, PAKDD 2007, Proceedings",
address = "Germany",
}
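
The abstract describes minimizing epsilon-insensitive fitting errors together with an L0 term on the regression coefficients, approximated by a successive linearization algorithm. The sketch below illustrates one common way such a scheme can be set up; it is an illustration under assumptions, not the paper's exact formulation: the L0 norm is replaced here by the concave surrogate sum_j (1 - exp(-alpha*|w_j|)), and each linearized subproblem is solved as a linear program with scipy. The function name l0_eps_regression and all parameter defaults are hypothetical.

# A minimal sketch of L0-style epsilon-insensitive regression solved by
# successive linearization. Assumptions (not from the paper): the L0 term is
# replaced by the concave surrogate sum_j (1 - exp(-alpha*|w_j|)), and each
# linearized subproblem is solved as a linear program with scipy.

import numpy as np
from scipy.optimize import linprog


def l0_eps_regression(X, y, eps=0.1, C=1.0, alpha=5.0, n_iter=10):
    """Approximately minimize  C*sum(xi_i) + surrogate-of-||w||_0  subject to
    |y_i - (x_i.w + b)| <= eps + xi_i, xi_i >= 0, by linearizing the concave
    surrogate and solving a sequence of LPs (assumed formulation, for
    illustration only)."""
    n, d = X.shape
    w = np.zeros(d)

    # Variable layout for each LP: [w (d), s (d), b (1), xi (n)], with s_j >= |w_j|.
    for _ in range(n_iter):
        # Linearize sum_j (1 - exp(-alpha*s_j)) around s_j = |w_j| of the current
        # iterate: the gradient w.r.t. s_j is alpha * exp(-alpha * s_j).
        v = alpha * np.exp(-alpha * np.abs(w))

        c = np.concatenate([np.zeros(d), v, [0.0], C * np.ones(n)])

        I_d, I_n = np.eye(d), np.eye(n)
        Z_dn = np.zeros((d, n))
        ones_n = np.ones((n, 1))

        A_ub = np.vstack([
            np.hstack([ I_d, -I_d, np.zeros((d, 1)), Z_dn]),   #  w_j - s_j <= 0
            np.hstack([-I_d, -I_d, np.zeros((d, 1)), Z_dn]),   # -w_j - s_j <= 0
            np.hstack([ X, np.zeros((n, d)),  ones_n, -I_n]),  #  (Xw + b) - xi <= y + eps
            np.hstack([-X, np.zeros((n, d)), -ones_n, -I_n]),  # -(Xw + b) - xi <= -y + eps
        ])
        b_ub = np.concatenate([np.zeros(2 * d), y + eps, -y + eps])

        bounds = ([(None, None)] * d      # w free
                  + [(0, None)] * d       # s >= 0
                  + [(None, None)]        # b free
                  + [(0, None)] * n)      # xi >= 0

        res = linprog(c, A_ub=A_ub, b_ub=b_ub, bounds=bounds, method="highs")
        w = res.x[:d]
        b = res.x[2 * d]

    return w, b

In a scheme of this kind, the "compact set of fitting coefficients" mentioned in the abstract corresponds to the coefficients whose magnitudes remain above a small threshold after the final LP, since the concave surrogate drives unneeded coefficients toward exactly zero at the LP vertices.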