change name from log loss to nll
eric-haibin-lin committed Sep 28, 2017
1 parent 8f66a7d commit 40a0550
Showing 2 changed files with 18 additions and 18 deletions.
32 changes: 16 additions & 16 deletions python/mxnet/metric.py
@@ -918,23 +918,23 @@ def update(self, labels, preds):
         self.num_inst += label.shape[0]
 
 @register
-@alias('log_loss')
-class LogarithmicLoss(EvalMetric):
-    """Computes logarithmic loss.
+@alias('nll_loss')
+class NegativeLogLikelihood(EvalMetric):
+    """Computes the negative log-likelihood loss.
 
-    The cross entropy over a batch of sample size :math:`N` is given by
+    The negative log-likelihood loss over a batch of sample size :math:`N` is given by
 
     .. math::
        -\\sum_{n=1}^{N}\\sum_{k=1}^{K}t_{nk}\\log (y_{nk}),
 
-    where :math:`t_{nk}=1` if and only if sample :math:`n` belongs to class :math:`k`.
-    :math:`y_{nk}` denotes the probability of sample :math:`n` belonging to
-    class :math:`k`.
+    where :math:`K` is the number of classes, :math:`y_{nk}` is the predicted probability
+    for the :math:`k`-th class of the :math:`n`-th sample, and :math:`t_{nk}=1` if and
+    only if sample :math:`n` belongs to class :math:`k`.
 
     Parameters
     ----------
     eps : float
-        Cross Entropy loss is undefined for predicted value is 0 or 1,
+        Negative log-likelihood loss is undefined when a predicted value is 0,
         so a small constant is added to the predicted values.
     name : str
         Name of this metric instance for display.
@@ -949,16 +949,16 @@ class :math:`k`.
     --------
     >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]
     >>> labels = [mx.nd.array([0, 1, 1])]
-    >>> log_loss = mx.metric.LogarithmicLoss()
-    >>> log_loss.update(labels, predicts)
-    >>> print log_loss.get()
-    ('log-loss', 0.57159948348999023)
+    >>> nll_loss = mx.metric.NegativeLogLikelihood()
+    >>> nll_loss.update(labels, predicts)
+    >>> print nll_loss.get()
+    ('nll-loss', 0.57159948348999023)
     """
-    def __init__(self, eps=1e-12, name='log-loss',
+    def __init__(self, eps=1e-12, name='nll-loss',
                  output_names=None, label_names=None):
-        super(LogarithmicLoss, self).__init__(
-            name, eps=eps,
-            output_names=output_names, label_names=label_names)
+        super(NegativeLogLikelihood, self).__init__(
+            name, eps=eps,
+            output_names=output_names, label_names=label_names)
         self.eps = eps
 
     def update(self, labels, preds):
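As a sanity check on the doctest above, the docstring's formula can be reproduced with plain NumPy. This is a minimal standalone sketch, not the metric's actual implementation: the helper name `nll` is invented for illustration, and the averaging over the batch size is inferred from the doctest output, since -(ln 0.3 + ln 1.0 + ln 0.6) / 3 ≈ 0.571599.

    import numpy as np

    def nll(labels, preds, eps=1e-12):
        # Probability each sample assigns to its true class; eps guards
        # against log(0), mirroring the metric's `eps` parameter.
        true_class_probs = preds[np.arange(len(labels)), labels] + eps
        # Negative log-likelihood, averaged over the batch.
        return -np.log(true_class_probs).mean()

    preds = np.array([[0.3, 0.7], [0.0, 1.0], [0.4, 0.6]])
    labels = np.array([0, 1, 1])
    print(nll(labels, preds))  # ~0.5715994, matching ('nll-loss', 0.57159948348999023)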
4 changes: 2 additions & 2 deletions tests/python/unittest/test_metric.py
@@ -35,8 +35,8 @@ def test_metrics():
     composite = mx.metric.create(['acc', 'f1'])
     check_metric(composite)
 
-def test_log_loss():
-    metric = mx.metric.create('log_loss')
+def test_nll_loss():
+    metric = mx.metric.create('nll_loss')
     pred = mx.nd.array([[0.2, 0.3, 0.5], [0.6, 0.1, 0.3]])
     label = mx.nd.array([2, 1])
     metric.update([label], [pred])
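The test's assertion is collapsed below the visible hunk, so its expected value is not shown here. For reference, a hand computation under the docstring's formula (true-class probabilities 0.5 and 0.1 for the two samples) would give:

    import math

    # Hand check under the docstring formula; this is not the test's
    # hidden assertion, just the value the formula predicts.
    expected = -(math.log(0.5) + math.log(0.1)) / 2
    print(expected)  # 1.4978661367769954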
