This repository was archived by the owner on Jul 10, 2021. It is now read-only.

Commit 65aaf82

Better tests and documentation for the logging.
1 parent 4c9d849 · commit 65aaf82


3 files changed: +34 additions, -21 deletions


docs/guide_intermediate.rst

Lines changed: 3 additions & 16 deletions
@@ -4,9 +4,7 @@ Misc. Additions
 Verbose Mode
 ------------
 
-To see the output of the neural network's training, you need to configure two things: first setting up the Python logger (mandatory), and secondly to specify a verbose mode if you want more information during training (optional).
-
-The first step is to configure either the ``sknn`` logger specifically, or do so globally (easier) as follows:
+To see the output of the neural network's training, configure the Python logger called ``sknn`` or the default root logger. This is possible using the standard ``logging`` module, which you can set up as follows:
 
 .. code:: python
 
@@ -18,20 +16,9 @@ The first step is to configure either the ``sknn`` logger specifically, or do so
         level=logging.DEBUG,
         stream=sys.stdout)
 
-Then you can optionally create your neural networks using an additional ``verbose`` parameter to show the output during training:
-
-.. code:: python
-
-    from sknn.mlp import Regressor, Layer
-
-    nn = Regressor(
-        layers=[Layer("Linear")],
-        n_iter=20,
-        verbose=True,
-        valid_size=0.25)
-    nn.fit(X, y)
+Change the log level to ``logging.INFO`` for less information about each epoch, or ``logging.WARNING`` to receive only messages about problems or failures.
 
-This code will output a table containing validation scores at each of the twenty epochs. The ``valid_size`` parameter is a ratio of the data to be used internally for validation; in short, the ``fit()`` function is automatically splitting the data into ``X_train`` and ``y_train`` as well as ``X_valid`` and ``y_valid``.
+Using the flag ``verbose=True`` on either :class:`sknn.mlp.Classifier` or :class:`sknn.mlp.Regressor` will set up a default logger at ``DEBUG`` level if one does not already exist, and ``verbose=False`` will set up a default logger at level ``WARNING`` if no logging has been configured.
 
 
 Saving & Loading
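
A minimal, self-contained version of the logging setup described above might look like the following; only the ``level`` and ``stream`` arguments are visible in the hunk, so the ``format`` string here is an illustrative assumption rather than the exact one used in the guide:

    import sys
    import logging

    # Configure the root logger globally; use logging.getLogger('sknn')
    # instead if you only want output from scikit-neuralnetwork.
    logging.basicConfig(
        format="%(message)s",        # assumed format string, not shown in the diff
        level=logging.DEBUG,         # logging.INFO or logging.WARNING for less detail
        stream=sys.stdout)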

sknn/nn.py

Lines changed: 5 additions & 4 deletions
@@ -336,9 +336,10 @@ class NeuralNetwork(object):
 
         * ``False`` — Setup new logger that shows only warnings and errors.
         * ``True`` — Setup a new logger that displays all debug messages.
+        * ``None`` — Don't setup a new logger under any condition (default).
 
     Using the built-in python ``logging`` module, you can control the detail and style of
-    output by customising the logger level and formatter for ``sknn`` logger.
+    output by customising the verbosity level and formatter for ``sknn`` logger.
     """
 
     def __init__(
@@ -359,7 +360,7 @@ def __init__(
             valid_size=0.0,
             loss_type='mse',
             debug=False,
-            verbose=False,
+            verbose=None,
             **params):
 
         self.layers = []
@@ -445,7 +446,7 @@ def is_convolution(self):
 
     def _create_logger(self):
         # If users have configured logging already, assume they know best.
-        if len(log.handlers) > 0 or len(log.parent.handlers) > 0:
+        if len(log.handlers) > 0 or len(log.parent.handlers) > 0 or self.verbose is None:
             return
 
         # Otherwise setup a default handler and formatter based on verbosity.
@@ -454,8 +455,8 @@ def _create_logger(self):
         hnd = logging.StreamHandler(stream=sys.stdout)
 
         hnd.setFormatter(fmt)
+        hnd.setLevel(lvl)
         log.addHandler(hnd)
-        log.setLevel(lvl)
 
     def _create_matrix_input(self, X, y=None):
         if self.is_convolution:
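
As a rough usage sketch of the behaviour this change introduces (using the ``Regressor`` and ``Layer`` classes from ``sknn.mlp``, the same ones exercised by the tests below):

    import logging
    from sknn.mlp import Regressor, Layer

    # verbose=True attaches a DEBUG-level handler to the 'sknn' logger,
    # but only if no logging has been configured yet; verbose=False would
    # attach a WARNING-level handler instead.
    nn = Regressor(layers=[Layer("Linear")], verbose=True)

    # verbose=None, the new default, never touches the logging configuration.
    nn_quiet = Regressor(layers=[Layer("Linear")], verbose=None)

    print(len(logging.getLogger('sknn').handlers))  # 1 -- only the first call added a handler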

sknn/tests/test_training.py

Lines changed: 26 additions & 1 deletion
@@ -1,5 +1,5 @@
 import unittest
-from nose.tools import (assert_in, assert_raises)
+from nose.tools import (assert_in, assert_raises, assert_equals)
 
 import io
 import logging
@@ -34,6 +34,31 @@ def test_FitAutomaticValidation(self):
         self.nn._fit(a_in, a_out)
 
 
+class TestCustomLogging(unittest.TestCase):
+
+    def setUp(self):
+        self.log = logging.getLogger('sknn')
+        self.log.handlers = []
+        self.backup, self.log.parent.handlers = self.log.parent.handlers, []
+
+    def tearDown(self):
+        self.log.parent.handlers = self.backup
+
+    def test_DefaultLogVerbose(self):
+        nn = MLPR(layers=[L("Linear")], verbose=True)
+        assert_equals(1, len(self.log.handlers))
+        assert_equals(logging.DEBUG, self.log.handlers[0].level)
+
+    def test_DefaultLogQuiet(self):
+        nn = MLPR(layers=[L("Linear")], verbose=False)
+        assert_equals(1, len(self.log.handlers))
+        assert_equals(logging.WARNING, self.log.handlers[0].level)
+
+    def test_VerboseNoneNoLog(self):
+        nn = MLPR(layers=[L("Linear")], verbose=None)
+        assert_equals(0, len(self.log.handlers))
+
+
 class TestTrainingOutput(unittest.TestCase):
 
     def setUp(self):
