pip install keras-gradient-accumulation
# Wrap an existing Keras optimizer (referenced by its string alias) so that
# gradients are accumulated over several mini-batches before one weight
# update is applied — effective batch size = accumulation_steps * batch size.
from keras_gradient_accumulation import GradientAccumulation
# 'adam' is the base optimizer; updates fire once every 8 accumulated steps.
optimizer = GradientAccumulation('adam', accumulation_steps=8)
# Alternative API: a dedicated Adam optimizer with accumulation built in,
# instead of wrapping a base optimizer by name.
from keras_gradient_accumulation import AdamAccumulated
# One weight update is applied after every 8 accumulated mini-batches.
optimizer = AdamAccumulated(accumulation_steps=8)
- Not available for models containing batch normalization layers (their moving statistics update every mini-batch, which conflicts with accumulated updates)
- Not compatible with `OptimizerV2` (the TensorFlow 2.x / `tf.keras` optimizer base class)