keras.py

from __future__ import absolute_import, division

from .auto import tqdm as tqdm_auto
from copy import copy

try:
    import keras
except ImportError as e:
    try:
        # fall back to the keras bundled with tensorflow
        from tensorflow import keras
    except ImportError:
        raise e

__author__ = {"github.com/": ["casperdcl"]}
__all__ = ['TqdmCallback']


class TqdmCallback(keras.callbacks.Callback):
    """`keras` callback for epoch and batch progress."""
    @staticmethod
    def bar2callback(bar, pop=None, delta=(lambda logs: 1)):
        """Wrap `bar` in a function usable as a `keras` hook (e.g. `on_epoch_end`)."""
        def callback(_, logs=None):
            n = delta(logs)
            if logs:
                if pop:
                    # drop keys (e.g. 'batch', 'size') which should not be
                    # displayed in the bar's postfix
                    logs = copy(logs)
                    [logs.pop(i, 0) for i in pop]
                bar.set_postfix(logs, refresh=False)
            bar.update(n)
        return callback

    def __init__(self, epochs=None, data_size=None, batch_size=None, verbose=1,
                 tqdm_class=tqdm_auto):
        """
        Parameters
        ----------
        epochs : int, optional
        data_size : int, optional
            Number of training pairs.
        batch_size : int, optional
            Number of training pairs per batch.
        verbose : int
            0: epoch, 1: batch (transient), 2: batch. [default: 1].
            Will be set to `0` unless both `data_size` and `batch_size`
            are given.
        tqdm_class : optional
            `tqdm` class to use for bars [default: `tqdm.auto.tqdm`].
        """
        self.tqdm_class = tqdm_class
        self.epoch_bar = tqdm_class(total=epochs, unit='epoch')
        self.on_epoch_end = self.bar2callback(self.epoch_bar)
        if data_size and batch_size:
            # ceiling division: number of batches per epoch
            self.batches = batches = (data_size + batch_size - 1) // batch_size
        else:
            self.batches = batches = None
        self.verbose = verbose
        if verbose == 1:
            self.batch_bar = tqdm_class(total=batches, unit='batch',
                                        leave=False)
            self.on_batch_end = self.bar2callback(
                self.batch_bar,
                pop=['batch', 'size'],
                delta=lambda logs: logs.get('size', 1))

    def on_train_begin(self, *_, **__):
        params = self.params.get
        # prefer the epoch count reported by keras over the constructor hint
        auto_total = params('epochs', params('nb_epoch', None))
        if auto_total is not None:
            self.epoch_bar.reset(total=auto_total)

    def on_epoch_begin(self, *_, **__):
        if self.verbose:
            params = self.params.get
            total = params('samples', params(
                'nb_sample', params('steps', None))) or self.batches
            if self.verbose == 2:
                # persistent batch bar: recreate it each epoch
                if hasattr(self, 'batch_bar'):
                    self.batch_bar.close()
                self.batch_bar = self.tqdm_class(
                    total=total, unit='batch', leave=True,
                    unit_scale=1 / (params('batch_size', 1) or 1))
                self.on_batch_end = self.bar2callback(
                    self.batch_bar,
                    pop=['batch', 'size'],
                    delta=lambda logs: logs.get('size', 1))
            elif self.verbose == 1:
                # transient batch bar: reuse it, resetting at each epoch
                self.batch_bar.unit_scale = 1 / (params('batch_size', 1) or 1)
                self.batch_bar.reset(total=total)
            else:
                raise KeyError('Unknown verbosity')

    def on_train_end(self, *_, **__):
        if self.verbose:
            self.batch_bar.close()
        self.epoch_bar.close()

    # tell (tf.)keras that the batch-level hooks above are genuinely
    # overridden so that it does not skip calling them
    @staticmethod
    def _implements_train_batch_hooks():
        return True

    @staticmethod
    def _implements_test_batch_hooks():
        return True

    @staticmethod
    def _implements_predict_batch_hooks():
        return True
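

# A minimal usage sketch (illustrative, not part of the module): pass a
# `TqdmCallback` instance via `callbacks=` and silence keras' own per-epoch
# logging with `verbose=0`. The toy model and random data are assumptions for
# demonstration only; running this requires numpy and a working
# keras/tensorflow install (invoke as e.g. `python -m tqdm.keras`, since the
# module uses a relative import).
if __name__ == "__main__":
    import numpy as np

    model = keras.models.Sequential([keras.layers.Dense(1)])
    model.compile(optimizer='sgd', loss='mse')
    X, y = np.random.rand(256, 8), np.random.rand(256, 1)
    model.fit(X, y, epochs=5, batch_size=32, verbose=0,
              callbacks=[TqdmCallback(verbose=1)])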