@inproceedings{abfdeec2d222465c8697cf2998a2cd59,
title = "Soft-Margin Softmax for Deep Classification",
abstract = "In deep classification, the softmax loss (Softmax) is arguably one of the most commonly used components for training deep convolutional neural networks (CNNs). However, this widely used loss is limited in that it does not explicitly encourage the discriminability of features. Recently, the large-margin softmax loss (L-Softmax [1]) was proposed to explicitly enhance feature discrimination, but it relies on a hard margin and complex forward and backward computation. In this paper, we propose a novel soft-margin softmax (SM-Softmax) loss to improve the discriminative power of features. Specifically, SM-Softmax only modifies the forward pass of Softmax by introducing a non-negative real number m, without changing the backward pass. Thus it can not only adjust the desired continuous soft margin but can also be easily optimized by standard stochastic gradient descent (SGD). Experimental results on three benchmark datasets demonstrate the superiority of our SM-Softmax over the baseline Softmax, the alternative L-Softmax, and several state-of-the-art competitors.",
keywords = "Classification, CNN, L-Softmax, SM-Softmax, Softmax",
author = "Xuezhi Liang and Xiaobo Wang and Zhen Lei and Shengcai Liao and Li, {Stan Z.}",
note = "Publisher Copyright: {\textcopyright} 2017, Springer International Publishing AG; 24th International Conference on Neural Information Processing, ICONIP 2017; Conference date: 14-11-2017 through 18-11-2017",
year = "2017",
doi = "10.1007/978-3-319-70096-0_43",
language = "English",
isbn = "9783319700953",
series = "Lecture Notes in Computer Science",
publisher = "Springer",
pages = "413--421",
editor = "Dongbin Zhao and El-Alfy, {El-Sayed M.} and Derong Liu and Shengli Xie and Yuanqing Li",
booktitle = "Neural Information Processing - 24th International Conference, ICONIP 2017, Proceedings",
}
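The abstract above describes SM-Softmax as introducing a non-negative real number m in the forward pass of the softmax loss while leaving the backward unchanged. Below is a minimal PyTorch sketch of that idea, assuming the loss subtracts m from the target-class logit before the usual softmax cross-entropy; the function name sm_softmax_loss and the margin value m = 0.3 are illustrative assumptions, not the paper's reference implementation (see the DOI above for the exact formulation).

    import torch
    import torch.nn.functional as F

    def sm_softmax_loss(logits, target, m=0.3):
        # Assumed soft-margin form: subtract a non-negative margin m from the
        # target-class logit only, then apply standard softmax cross-entropy.
        # With m = 0 this reduces to the plain softmax loss; autograd applies
        # the unchanged softmax backward to the shifted logits.
        margin = m * F.one_hot(target, num_classes=logits.size(1)).to(logits.dtype)
        return F.cross_entropy(logits - margin, target)

    # Illustrative usage: batch of 8 samples, 10 classes.
    logits = torch.randn(8, 10, requires_grad=True)
    target = torch.randint(0, 10, (8,))
    loss = sm_softmax_loss(logits, target)
    loss.backward()

Because only the forward logits are shifted, the gradient keeps the standard softmax form, consistent with the abstract's claim that SM-Softmax is easily optimized by SGD.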