File: index.html

<!DOCTYPE html>
<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
  <meta charset="utf-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  
  
  <link rel="canonical" href="http://keras.io/callbacks/">
  <link rel="shortcut icon" href="../img/favicon.ico">
  <title>Callbacks - Keras Documentation</title>
  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Source+Sans+Pro:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>

  <link rel="stylesheet" href="../css/theme.css" type="text/css" />
  <link rel="stylesheet" href="../css/theme_extra.css" type="text/css" />
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/styles/github.min.css">
  
  <script>
    // Current page data
    var mkdocs_page_name = "Callbacks";
    var mkdocs_page_input_path = "callbacks.md";
    var mkdocs_page_url = "/callbacks/";
  </script>
  
  <script src="../js/jquery-2.1.1.min.js" defer></script>
  <script src="../js/modernizr-2.8.3.min.js" defer></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/highlight.min.js"></script>
  <script>hljs.initHighlightingOnLoad();</script> 
  
  <script>
      (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
      (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
      m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
      })(window,document,'script','https://www.google-analytics.com/analytics.js','ga');

      ga('create', 'UA-61785484-1', 'keras.io');
      ga('send', 'pageview');
  </script>
  
</head>

<body class="wy-body-for-nav" role="document">

  <div class="wy-grid-for-nav">

    
    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
    <div class="wy-side-scroll">
      <a href="">
        <div class="keras-logo">
          <img src="/img/keras-logo-small.jpg" class="keras-logo-img">
          Keras Documentation
        </div>
      </a>

      <div class="wy-side-nav-search">
        <div role="search">
  <form id ="rtd-search-form" class="wy-form" action="../search.html" method="get">
    <input type="text" name="q" placeholder="Search docs" title="Type search term here" />
  </form>
</div>
      </div>

      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="..">Home</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../why-use-keras/">Why use Keras</a>
                    </li>
                </ul>
                <p class="caption"><span class="caption-text">Getting started</span></p>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../getting-started/sequential-model-guide/">Guide to the Sequential model</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../getting-started/functional-api-guide/">Guide to the Functional API</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../getting-started/faq/">FAQ</a>
                    </li>
                </ul>
                <p class="caption"><span class="caption-text">Models</span></p>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../models/about-keras-models/">About Keras models</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../models/sequential/">Sequential</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../models/model/">Model (functional API)</a>
                    </li>
                </ul>
                <p class="caption"><span class="caption-text">Layers</span></p>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/about-keras-layers/">About Keras layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/core/">Core Layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/convolutional/">Convolutional Layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/pooling/">Pooling Layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/local/">Locally-connected Layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/recurrent/">Recurrent Layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/embeddings/">Embedding Layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/merge/">Merge Layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/advanced-activations/">Advanced Activations Layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/normalization/">Normalization Layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/noise/">Noise layers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/wrappers/">Layer wrappers</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../layers/writing-your-own-keras-layers/">Writing your own Keras layers</a>
                    </li>
                </ul>
                <p class="caption"><span class="caption-text">Preprocessing</span></p>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../preprocessing/sequence/">Sequence Preprocessing</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../preprocessing/text/">Text Preprocessing</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../preprocessing/image/">Image Preprocessing</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../losses/">Losses</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../metrics/">Metrics</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../optimizers/">Optimizers</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../activations/">Activations</a>
                    </li>
                </ul>
                <ul class="current">
                    <li class="toctree-l1 current"><a class="reference internal current" href="./">Callbacks</a>
    <ul class="current">
    <li class="toctree-l2"><a class="reference internal" href="#callback">Callback</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#baselogger">BaseLogger</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#terminateonnan">TerminateOnNaN</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#progbarlogger">ProgbarLogger</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#history">History</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#modelcheckpoint">ModelCheckpoint</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#earlystopping">EarlyStopping</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#remotemonitor">RemoteMonitor</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#learningratescheduler">LearningRateScheduler</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#reducelronplateau">ReduceLROnPlateau</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#csvlogger">CSVLogger</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#lambdacallback">LambdaCallback</a>
    </li>
    <li class="toctree-l2"><a class="reference internal" href="#tensorboard">TensorBoard</a>
    </li>
    </ul>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../datasets/">Datasets</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../applications/">Applications</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../backend/">Backend</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../initializers/">Initializers</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../regularizers/">Regularizers</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../constraints/">Constraints</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../visualization/">Visualization</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../scikit-learn-api/">Scikit-learn API</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../utils/">Utils</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../contributing/">Contributing</a>
                    </li>
                </ul>
                <p class="caption"><span class="caption-text">Examples</span></p>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/addition_rnn/">Addition RNN</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/antirectifier/">Custom layer - antirectifier</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/babi_rnn/">Baby RNN</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/babi_memnn/">Baby MemNN</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/cifar10_cnn/">CIFAR-10 CNN</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/cifar10_resnet/">CIFAR-10 ResNet</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/conv_filter_visualization/">Convolution filter visualization</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/conv_lstm/">Convolutional LSTM</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/deep_dream/">Deep Dream</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/image_ocr/">Image OCR</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/imdb_bidirectional_lstm/">Bidirectional LSTM</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/imdb_cnn/">1D CNN for text classification</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/imdb_cnn_lstm/">Sentiment classification CNN-LSTM</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/imdb_fasttext/">Fasttext for text classification</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/imdb_lstm/">Sentiment classification LSTM</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/lstm_seq2seq/">Sequence to sequence - training</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/lstm_seq2seq_restore/">Sequence to sequence - prediction</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/lstm_stateful/">Stateful LSTM</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/lstm_text_generation/">LSTM for text generation</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="../examples/mnist_acgan/">Auxiliary Classifier GAN</a>
                    </li>
                </ul>
      </div>
    </div>
    </nav>

    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">

      
      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
        <a href="..">Keras Documentation</a>
      </nav>

      
      <div class="wy-nav-content">
        <div class="rst-content">
          <div role="navigation" aria-label="breadcrumbs navigation">
  <ul class="wy-breadcrumbs">
    <li><a href="..">Docs</a> &raquo;</li>
    
      
    
    <li>Callbacks</li>
    <li class="wy-breadcrumbs-aside">
      
        <a href="https://github.com/keras-team/keras/tree/master/docs"
          class="icon icon-github"> Edit on GitHub</a>
      
    </li>
  </ul>
  
  <hr/>
</div>
          <div role="main">
            <div class="section">
              
                <h2 id="usage-of-callbacks">Usage of callbacks</h2>
<p>A callback is a set of functions to be applied at given stages of the training procedure. You can use callbacks to get a view on internal states and statistics of the model during training. You can pass a list of callbacks (as the keyword argument <code>callbacks</code>) to the <code>.fit()</code> method of the <code>Sequential</code> or <code>Model</code> classes. The relevant methods of the callbacks will then be called at each stage of the training. </p>
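<p>For instance, a minimal sketch (assuming <code>model</code>, <code>x_train</code> and <code>y_train</code> are already defined) that passes two built-in callbacks to <code>fit</code>:</p>
<pre><code class="python">from keras.callbacks import EarlyStopping, ModelCheckpoint

callbacks_list = [
    EarlyStopping(monitor='val_loss', patience=3),
    ModelCheckpoint(filepath='/tmp/best_model.h5', save_best_only=True),
]
model.fit(x_train, y_train, validation_split=0.2, epochs=20,
          callbacks=callbacks_list)
</code></pre>
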
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L275">[source]</a></span></p>
<h3 id="callback">Callback</h3>
<pre><code class="python">keras.callbacks.callbacks.Callback()
</code></pre>

<p>Abstract base class used to build new callbacks.</p>
<p><strong>Properties</strong></p>
<ul>
<li><strong>params</strong>: dict. Training parameters
    (eg. verbosity, batch size, number of epochs...).</li>
<li><strong>model</strong>: instance of <code>keras.models.Model</code>.
    Reference of the model being trained.</li>
</ul>
<p>The <code>logs</code> dictionary that callback methods
take as argument will contain keys for quantities relevant to
the current batch or epoch.</p>
<p>Currently, the <code>.fit()</code> method of the <code>Sequential</code> model class
will include the following quantities in the <code>logs</code> that
it passes to its callbacks:</p>
<ul>
<li><code>on_epoch_end</code>: logs include <code>acc</code> and <code>loss</code>, and
    optionally include <code>val_loss</code>
    (if validation is enabled in <code>fit</code>) and <code>val_acc</code>
    (if validation and accuracy monitoring are enabled).</li>
<li><code>on_batch_begin</code>: logs include <code>size</code>,
    the number of samples in the current batch.</li>
<li><code>on_batch_end</code>: logs include <code>loss</code>, and optionally <code>acc</code>
    (if accuracy monitoring is enabled).</li>
</ul>
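<p>For example, a minimal sketch of a custom callback (assuming <code>model</code>, <code>x_train</code> and <code>y_train</code> are already defined) that prints which keys are present in <code>logs</code> at the end of each epoch:</p>
<pre><code class="python">import keras

class LogKeysPrinter(keras.callbacks.Callback):
    # Hypothetical helper: report which quantities `logs` contains.
    def on_epoch_end(self, epoch, logs=None):
        print('epoch', epoch, 'logs:', sorted((logs or {}).keys()))

model.fit(x_train, y_train, validation_split=0.2,
          callbacks=[LogKeysPrinter()])
</code></pre>
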
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L477">[source]</a></span></p>
<h3 id="baselogger">BaseLogger</h3>
<pre><code class="python">keras.callbacks.callbacks.BaseLogger(stateful_metrics=None)
</code></pre>

<p>Callback that accumulates epoch averages of metrics.</p>
<p>This callback is automatically applied to every Keras model.</p>
<p><strong>Arguments</strong></p>
<ul>
<li><strong>stateful_metrics</strong>: Iterable of string names of metrics that
    should <em>not</em> be averaged over an epoch.
    Metrics in this list will be logged as-is in <code>on_epoch_end</code>.
    All others will be averaged in <code>on_epoch_end</code>.</li>
</ul>
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L524">[source]</a></span></p>
<h3 id="terminateonnan">TerminateOnNaN</h3>
<pre><code class="python">keras.callbacks.callbacks.TerminateOnNaN()
</code></pre>

<p>Callback that terminates training when a NaN loss is encountered.</p>
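<p>A minimal usage sketch (assuming <code>model</code>, <code>x_train</code> and <code>y_train</code> are already defined):</p>
<pre><code class="python">from keras.callbacks import TerminateOnNaN

# Training stops as soon as a batch produces a NaN loss.
model.fit(x_train, y_train, epochs=20, callbacks=[TerminateOnNaN()])
</code></pre>
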
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L537">[source]</a></span></p>
<h3 id="progbarlogger">ProgbarLogger</h3>
<pre><code class="python">keras.callbacks.callbacks.ProgbarLogger(count_mode='samples', stateful_metrics=None)
</code></pre>

<p>Callback that prints metrics to stdout.</p>
<p><strong>Arguments</strong></p>
<ul>
<li><strong>count_mode</strong>: One of "steps" or "samples".
    Whether the progress bar should
    count samples seen or steps (batches) seen.</li>
<li><strong>stateful_metrics</strong>: Iterable of string names of metrics that
    should <em>not</em> be averaged over an epoch.
    Metrics in this list will be logged as-is.
    All others will be averaged over time (e.g. loss).</li>
</ul>
<p><strong>Raises</strong></p>
<ul>
<li><strong>ValueError</strong>: In case of invalid <code>count_mode</code>.</li>
</ul>
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L614">[source]</a></span></p>
<h3 id="history">History</h3>
<pre><code class="python">keras.callbacks.callbacks.History()
</code></pre>

<p>Callback that records events into a <code>History</code> object.</p>
<p>This callback is automatically applied to
every Keras model. The <code>History</code> object
gets returned by the <code>fit</code> method of models.</p>
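<p>A minimal sketch of reading the recorded history (assuming <code>model</code>, <code>x_train</code> and <code>y_train</code> are already defined):</p>
<pre><code class="python">history = model.fit(x_train, y_train, validation_split=0.2, epochs=10)

# `history.history` maps metric names (e.g. 'loss', 'val_loss') to
# lists of per-epoch values.
print(history.history['loss'])
</code></pre>
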
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L633">[source]</a></span></p>
<h3 id="modelcheckpoint">ModelCheckpoint</h3>
<pre><code class="python">keras.callbacks.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=0, save_best_only=False, save_weights_only=False, mode='auto', period=1)
</code></pre>

<p>Save the model after every epoch.</p>
<p><code>filepath</code> can contain named formatting options,
which will be filled with the values of <code>epoch</code> and
keys in <code>logs</code> (passed in <code>on_epoch_end</code>).</p>
<p>For example: if <code>filepath</code> is <code>weights.{epoch:02d}-{val_loss:.2f}.hdf5</code>,
then the model checkpoints will be saved with the epoch number and
the validation loss in the filename.</p>
<p><strong>Arguments</strong></p>
<ul>
<li><strong>filepath</strong>: string, path to save the model file.</li>
<li><strong>monitor</strong>: quantity to monitor.</li>
<li><strong>verbose</strong>: verbosity mode, 0 or 1.</li>
<li><strong>save_best_only</strong>: if <code>save_best_only=True</code>,
    the latest best model according to
    the quantity monitored will not be overwritten.</li>
<li><strong>save_weights_only</strong>: if True, then only the model's weights will be
    saved (<code>model.save_weights(filepath)</code>), else the full model
    is saved (<code>model.save(filepath)</code>).</li>
<li><strong>mode</strong>: one of {auto, min, max}.
    If <code>save_best_only=True</code>, the decision
    to overwrite the current save file is made
    based on either the maximization or the
    minimization of the monitored quantity. For <code>val_acc</code>,
    this should be <code>max</code>, for <code>val_loss</code> this should
    be <code>min</code>, etc. In <code>auto</code> mode, the direction is
    automatically inferred from the name of the monitored quantity.</li>
<li><strong>period</strong>: Interval (number of epochs) between checkpoints.</li>
</ul>
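<p>A sketch using the formatted <code>filepath</code> described above (assuming <code>model</code>, <code>x_train</code> and <code>y_train</code> are already defined):</p>
<pre><code class="python">from keras.callbacks import ModelCheckpoint

# Write one file per improvement of `val_loss`, named after the epoch
# number and the validation loss.
checkpoint = ModelCheckpoint('weights.{epoch:02d}-{val_loss:.2f}.hdf5',
                             monitor='val_loss', save_best_only=True, verbose=1)
model.fit(x_train, y_train, validation_split=0.2, epochs=20,
          callbacks=[checkpoint])
</code></pre>
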
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L733">[source]</a></span></p>
<h3 id="earlystopping">EarlyStopping</h3>
<pre><code class="python">keras.callbacks.callbacks.EarlyStopping(monitor='val_loss', min_delta=0, patience=0, verbose=0, mode='auto', baseline=None, restore_best_weights=False)
</code></pre>

<p>Stop training when a monitored quantity has stopped improving.</p>
<p><strong>Arguments</strong></p>
<ul>
<li><strong>monitor</strong>: quantity to be monitored.</li>
<li><strong>min_delta</strong>: minimum change in the monitored quantity
    to qualify as an improvement, i.e. an absolute
    change of less than <code>min_delta</code> will count as no
    improvement.</li>
<li><strong>patience</strong>: number of epochs with no improvement
    in the monitored quantity
    after which training will be stopped.
    Validation quantities may not be produced for every
    epoch if the validation frequency
    (<code>model.fit(validation_freq=5)</code>) is greater than one.</li>
<li><strong>verbose</strong>: verbosity mode.</li>
<li><strong>mode</strong>: one of {auto, min, max}. In <code>min</code> mode,
    training will stop when the quantity
    monitored has stopped decreasing; in <code>max</code>
    mode it will stop when the quantity
    monitored has stopped increasing; in <code>auto</code>
    mode, the direction is automatically inferred
    from the name of the monitored quantity.</li>
<li><strong>baseline</strong>: Baseline value for the monitored quantity to reach.
    Training will stop if the model doesn't show improvement
    over the baseline.</li>
<li><strong>restore_best_weights</strong>: whether to restore model weights from
    the epoch with the best value of the monitored quantity.
    If False, the model weights obtained at the last step of
    training are used.</li>
</ul>
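<p>A minimal sketch (assuming <code>model</code>, <code>x_train</code> and <code>y_train</code> are already defined):</p>
<pre><code class="python">from keras.callbacks import EarlyStopping

# Stop once `val_loss` has not improved by at least 0.001 for
# 5 consecutive epochs, and keep the best weights seen so far.
early_stopping = EarlyStopping(monitor='val_loss', min_delta=0.001,
                               patience=5, restore_best_weights=True)
model.fit(x_train, y_train, validation_split=0.2, epochs=100,
          callbacks=[early_stopping])
</code></pre>
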
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L851">[source]</a></span></p>
<h3 id="remotemonitor">RemoteMonitor</h3>
<pre><code class="python">keras.callbacks.callbacks.RemoteMonitor(root='http://localhost:9000', path='/publish/epoch/end/', field='data', headers=None, send_as_json=False)
</code></pre>

<p>Callback used to stream events to a server.</p>
<p>Requires the <code>requests</code> library.
Events are sent to <code>root + '/publish/epoch/end/'</code> by default. Calls are
HTTP POST, with a <code>data</code> argument which is a
JSON-encoded dictionary of event data.
If <code>send_as_json</code> is set to True, the content type of the request will be
application/json. Otherwise the serialized JSON will be sent within a form.</p>
<p><strong>Arguments</strong></p>
<ul>
<li><strong>root</strong>: String; root url of the target server.</li>
<li><strong>path</strong>: String; path relative to <code>root</code> to which the events will be sent.</li>
<li><strong>field</strong>: String; JSON field under which the data will be stored.
    The field is used only if the payload is sent within a form
    (i.e. send_as_json is set to False).</li>
<li><strong>headers</strong>: Dictionary; optional custom HTTP headers.</li>
<li><strong>send_as_json</strong>: Boolean; whether the request should be sent as
    application/json.</li>
</ul>
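<p>A minimal sketch, assuming a server is listening at the (hypothetical) address below and that <code>model</code>, <code>x_train</code> and <code>y_train</code> are already defined:</p>
<pre><code class="python">from keras.callbacks import RemoteMonitor

# POST the epoch-end logs as application/json to the server.
monitor = RemoteMonitor(root='http://localhost:9000', send_as_json=True)
model.fit(x_train, y_train, epochs=20, callbacks=[monitor])
</code></pre>
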
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L910">[source]</a></span></p>
<h3 id="learningratescheduler">LearningRateScheduler</h3>
<pre><code class="python">keras.callbacks.callbacks.LearningRateScheduler(schedule, verbose=0)
</code></pre>

<p>Learning rate scheduler.</p>
<p><strong>Arguments</strong></p>
<ul>
<li><strong>schedule</strong>: a function that takes an epoch index
    (integer, indexed from 0) and the current learning rate as inputs,
    and returns a new learning rate as output (float).</li>
<li><strong>verbose</strong>: int. 0: quiet, 1: update messages.</li>
</ul>
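<p>A minimal sketch (assuming <code>model</code>, <code>x_train</code> and <code>y_train</code> are already defined):</p>
<pre><code class="python">from keras.callbacks import LearningRateScheduler

def schedule(epoch, lr):
    # Decay the current learning rate by 10% every epoch.
    return lr * 0.9

lr_scheduler = LearningRateScheduler(schedule, verbose=1)
model.fit(x_train, y_train, epochs=20, callbacks=[lr_scheduler])
</code></pre>
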
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L946">[source]</a></span></p>
<h3 id="reducelronplateau">ReduceLROnPlateau</h3>
<pre><code class="python">keras.callbacks.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=10, verbose=0, mode='auto', min_delta=0.0001, cooldown=0, min_lr=0)
</code></pre>

<p>Reduce learning rate when a metric has stopped improving.</p>
<p>Models often benefit from reducing the learning rate by a factor
of 2-10 once learning stagnates. This callback monitors a
quantity and if no improvement is seen for a 'patience' number
of epochs, the learning rate is reduced.</p>
<p><strong>Example</strong></p>
<pre><code class="python">reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2,
                              patience=5, min_lr=0.001)
model.fit(X_train, Y_train, callbacks=[reduce_lr])
</code></pre>

<p><strong>Arguments</strong></p>
<ul>
<li><strong>monitor</strong>: quantity to be monitored.</li>
<li><strong>factor</strong>: factor by which the learning rate will
    be reduced. <code>new_lr = lr * factor</code>.</li>
<li><strong>patience</strong>: number of epochs with no improvement
    in the monitored quantity
    after which the learning rate will be reduced.
    Validation quantities may not be produced for every
    epoch if the validation frequency
    (<code>model.fit(validation_freq=5)</code>) is greater than one.</li>
<li><strong>verbose</strong>: int. 0: quiet, 1: update messages.</li>
<li><strong>mode</strong>: one of {auto, min, max}. In <code>min</code> mode,
    lr will be reduced when the quantity
    monitored has stopped decreasing; in <code>max</code>
    mode it will be reduced when the quantity
    monitored has stopped increasing; in <code>auto</code>
    mode, the direction is automatically inferred
    from the name of the monitored quantity.</li>
<li><strong>min_delta</strong>: threshold for measuring the new optimum,
    to only focus on significant changes.</li>
<li><strong>cooldown</strong>: number of epochs to wait before resuming
    normal operation after lr has been reduced.</li>
<li><strong>min_lr</strong>: lower bound on the learning rate.</li>
</ul>
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L1071">[source]</a></span></p>
<h3 id="csvlogger">CSVLogger</h3>
<pre><code class="python">keras.callbacks.callbacks.CSVLogger(filename, separator=',', append=False)
</code></pre>

<p>Callback that streams epoch results to a csv file.</p>
<p>Supports all values that can be represented as a string,
including 1D iterables such as np.ndarray.</p>
<p><strong>Example</strong></p>
<pre><code class="python">csv_logger = CSVLogger('training.log')
model.fit(X_train, Y_train, callbacks=[csv_logger])
</code></pre>

<p><strong>Arguments</strong></p>
<ul>
<li><strong>filename</strong>: filename of the csv file, e.g. 'run/log.csv'.</li>
<li><strong>separator</strong>: string used to separate elements in the csv file.</li>
<li><strong>append</strong>: True: append if file exists (useful for continuing
    training). False: overwrite existing file.</li>
</ul>
<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/callbacks.py#L1159">[source]</a></span></p>
<h3 id="lambdacallback">LambdaCallback</h3>
<pre><code class="python">keras.callbacks.callbacks.LambdaCallback(on_epoch_begin=None, on_epoch_end=None, on_batch_begin=None, on_batch_end=None, on_train_begin=None, on_train_end=None)
</code></pre>

<p>Callback for creating simple, custom callbacks on-the-fly.</p>
<p>This callback is constructed with anonymous functions that will be called
at the appropriate time. Note that the callbacks expect positional
arguments, as follows:</p>
<ul>
<li><code>on_epoch_begin</code> and <code>on_epoch_end</code> expect two positional arguments:
<code>epoch</code>, <code>logs</code></li>
<li><code>on_batch_begin</code> and <code>on_batch_end</code> expect two positional arguments:
<code>batch</code>, <code>logs</code></li>
<li><code>on_train_begin</code> and <code>on_train_end</code> expect one positional argument:
<code>logs</code></li>
</ul>
<p><strong>Arguments</strong></p>
<ul>
<li><strong>on_epoch_begin</strong>: called at the beginning of every epoch.</li>
<li><strong>on_epoch_end</strong>: called at the end of every epoch.</li>
<li><strong>on_batch_begin</strong>: called at the beginning of every batch.</li>
<li><strong>on_batch_end</strong>: called at the end of every batch.</li>
<li><strong>on_train_begin</strong>: called at the beginning of model training.</li>
<li><strong>on_train_end</strong>: called at the end of model training.</li>
</ul>
<p><strong>Example</strong></p>
<pre><code class="python"># Print the batch number at the beginning of every batch.
batch_print_callback = LambdaCallback(
    on_batch_begin=lambda batch,logs: print(batch))

# Stream the epoch loss to a file in JSON format. The file content
# is not well-formed JSON but rather has a JSON object per line.
import json
json_log = open('loss_log.json', mode='wt', buffering=1)
json_logging_callback = LambdaCallback(
    on_epoch_end=lambda epoch, logs: json_log.write(
        json.dumps({'epoch': epoch, 'loss': logs['loss']}) + '\n'),
    on_train_end=lambda logs: json_log.close()
)

# Terminate some processes after having finished model training.
processes = ...
cleanup_callback = LambdaCallback(
    on_train_end=lambda logs: [
        p.terminate() for p in processes if p.is_alive()])

model.fit(...,
          callbacks=[batch_print_callback,
                     json_logging_callback,
                     cleanup_callback])
</code></pre>

<hr />
<p><span style="float:right;"><a href="https://github.com/keras-team/keras/blob/master/keras/callbacks/tensorboard_v1.py#L20">[source]</a></span></p>
<h3 id="tensorboard">TensorBoard</h3>
<pre><code class="python">keras.callbacks.tensorboard_v1.TensorBoard(log_dir='./logs', histogram_freq=0, batch_size=32, write_graph=True, write_grads=False, write_images=False, embeddings_freq=0, embeddings_layer_names=None, embeddings_metadata=None, embeddings_data=None, update_freq='epoch')
</code></pre>

<p>TensorBoard basic visualizations.</p>
<p><a href="https://www.tensorflow.org/guide/summaries_and_tensorboard">TensorBoard</a>
is a visualization tool provided with TensorFlow.</p>
<p>This callback writes a log for TensorBoard, which allows
you to visualize dynamic graphs of your training and test
metrics, as well as activation histograms for the different
layers in your model.</p>
<p>If you have installed TensorFlow with pip, you should be able
to launch TensorBoard from the command line:</p>
<pre><code class="sh">tensorboard --logdir=/full_path_to_your_logs
</code></pre>

<p>When using a backend other than TensorFlow, TensorBoard will still work
(if you have TensorFlow installed), but the only feature available will
be the display of the losses and metrics plots.</p>
<p><strong>Arguments</strong></p>
<ul>
<li><strong>log_dir</strong>: the path of the directory where to save the log
    files to be parsed by TensorBoard.</li>
<li><strong>histogram_freq</strong>: frequency (in epochs) at which to compute activation
    and weight histograms for the layers of the model. If set to 0,
    histograms won't be computed. Validation data (or split) must be
    specified for histogram visualizations.</li>
<li><strong>batch_size</strong>: size of batch of inputs to feed to the network
    for histograms computation.</li>
<li><strong>write_graph</strong>: whether to visualize the graph in TensorBoard.
    The log file can become quite large when
    write_graph is set to True.</li>
<li><strong>write_grads</strong>: whether to visualize gradient histograms in TensorBoard.
    <code>histogram_freq</code> must be greater than 0.</li>
<li><strong>write_images</strong>: whether to write model weights to visualize as
    image in TensorBoard.</li>
<li><strong>embeddings_freq</strong>: frequency (in epochs) at which selected embedding
    layers will be saved. If set to 0, embeddings won't be computed.
    Data to be visualized in TensorBoard's Embedding tab must be passed
    as <code>embeddings_data</code>.</li>
<li><strong>embeddings_layer_names</strong>: a list of names of layers to keep an eye on. If
    None or an empty list, all the embedding layers will be watched.</li>
<li><strong>embeddings_metadata</strong>: a dictionary which maps layer name to a file name
    in which metadata for this embedding layer is saved. See the
    <a href="https://www.tensorflow.org/guide/embedding#metadata">details</a>
    about the metadata file format. If the same metadata file is
    used for all embedding layers, a single string can be passed.</li>
<li><strong>embeddings_data</strong>: data to be embedded at layers specified in
    <code>embeddings_layer_names</code>. Numpy array (if the model has a single
    input) or list of Numpy arrays (if the model has multiple inputs).
    Learn <a href="https://www.tensorflow.org/guide/embedding">more about embeddings</a>.</li>
<li><strong>update_freq</strong>: <code>'batch'</code> or <code>'epoch'</code> or integer. When using <code>'batch'</code>, writes
    the losses and metrics to TensorBoard after each batch. The same
    applies for <code>'epoch'</code>. If using an integer, let's say <code>10000</code>,
    the callback will write the metrics and losses to TensorBoard every
    10000 samples. Note that writing too frequently to TensorBoard
    can slow down your training.</li>
</ul>
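<p>A minimal sketch (assuming <code>model</code>, <code>x_train</code> and <code>y_train</code> are already defined); the logs can then be inspected with <code>tensorboard --logdir=./logs</code>:</p>
<pre><code class="python">from keras.callbacks import TensorBoard

# Write per-epoch losses and metrics under ./logs.
tensorboard = TensorBoard(log_dir='./logs', histogram_freq=0,
                          write_graph=True, update_freq='epoch')
model.fit(x_train, y_train, validation_split=0.2, epochs=20,
          callbacks=[tensorboard])
</code></pre>
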
<hr />
<h1 id="create-a-callback">Create a callback</h1>
<p>You can create a custom callback by extending the base class <code>keras.callbacks.Callback</code>. A callback has access to its associated model through its <code>self.model</code> attribute.</p>
<p>Here's a simple example saving a list of losses over each batch during training:</p>
<pre><code class="python">class LossHistory(keras.callbacks.Callback):
    def on_train_begin(self, logs={}):
        self.losses = []

    def on_batch_end(self, batch, logs={}):
        self.losses.append(logs.get('loss'))
</code></pre>

<hr />
<h3 id="example-recording-loss-history">Example: recording loss history</h3>
<pre><code class="python">class LossHistory(keras.callbacks.Callback):
    def on_train_begin(self, logs={}):
        self.losses = []

    def on_batch_end(self, batch, logs={}):
        self.losses.append(logs.get('loss'))

model = Sequential()
model.add(Dense(10, input_dim=784, kernel_initializer='uniform'))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')

history = LossHistory()
model.fit(x_train, y_train, batch_size=128, epochs=20, verbose=0, callbacks=[history])

print(history.losses)
# outputs
'''
[0.66047596406559383, 0.3547245744908703, ..., 0.25953155204159617, 0.25901699725311789]
'''
</code></pre>

<hr />
<h3 id="example-model-checkpoints">Example: model checkpoints</h3>
<pre><code class="python">from keras.callbacks import ModelCheckpoint

model = Sequential()
model.add(Dense(10, input_dim=784, kernel_initializer='uniform'))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')

'''
saves the model after each epoch if the validation loss decreased
'''
checkpointer = ModelCheckpoint(filepath='/tmp/weights.hdf5', verbose=1, save_best_only=True)
model.fit(x_train, y_train, batch_size=128, epochs=20, verbose=0, validation_data=(x_test, y_test), callbacks=[checkpointer])
</code></pre>
              
            </div>
          </div>
          <footer>
  
    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
      
        <a href="../datasets/" class="btn btn-neutral float-right" title="Datasets">Next <span class="icon icon-circle-arrow-right"></span></a>
      
      
        <a href="../activations/" class="btn btn-neutral" title="Activations"><span class="icon icon-circle-arrow-left"></span> Previous</a>
      
    </div>
  

  <hr/>

  <div role="contentinfo">
    <!-- Copyright etc -->
    
  </div>

  Built with <a href="https://www.mkdocs.org/">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
</footer>
      
        </div>
      </div>

    </section>

  </div>

  <div class="rst-versions" role="note" aria-label="versions">
    <span class="rst-current-version" data-toggle="rst-current-version">
      
          <a href="http://github.com/keras-team/keras/" class="fa fa-github" style="float: left; color: #fcfcfc"> GitHub</a>
      
      
        <span><a href="../activations/" style="color: #fcfcfc;">&laquo; Previous</a></span>
      
      
        <span style="margin-left: 15px"><a href="../datasets/" style="color: #fcfcfc">Next &raquo;</a></span>
      
    </span>
</div>
    <script>var base_url = '..';</script>
    <script src="../js/theme.js" defer></script>
      <script src="../search/main.js" defer></script>
    <script type="text/javascript" defer>
        window.onload = function () {
            SphinxRtdTheme.Navigation.enable(true);
        };
    </script>

</body>
</html>