@inproceedings{765685035470438cb406d9dc387969dc,
  title     = {Sparseness and a reduction from {Totally Nonnegative Least Squares} to {SVM}},
  abstract  = {Nonnegative Least Squares (NNLS) is a general form for many important problems. We consider a special case of NNLS where the input is nonnegative. It is called Totally Nonnegative Least Squares (TNNLS) in the literature. We show a reduction of TNNLS to a single class Support Vector Machine (SVM), thus relating the sparsity of a TNNLS solution to the sparsity of supports in a SVM. This allows us to apply any SVM solver to the TNNLS problem. We get an order of magnitude improvement in running time by first obtaining a smaller version of our original problem with the same solution using a fast approximate SVM solver. Second, we use an exact NNLS solver to obtain the solution. We present experimental evidence that this approach improves the performance of state-of-the-art NNLS solvers by applying it to both randomly generated problems as well as to real datasets, calculating radiation therapy dosages for cancer patients.},
  author    = {Potluru, Vamsi K. and Plis, Sergey M. and Luan, Shuang and Calhoun, Vince D. and Hayes, Thomas P.},
  booktitle = {2011 International Joint Conference on Neural Networks, {IJCNN} 2011 - Final Program},
  series    = {Proceedings of the International Joint Conference on Neural Networks},
  pages     = {1922--1929},
  year      = {2011},
  doi       = {10.1109/IJCNN.2011.6033459},
  isbn      = {9781457710865},
  language  = {English (US)},
  note      = {2011 International Joint Conference on Neural Networks, IJCNN 2011; Conference date: 31-07-2011 through 05-08-2011},
}