Standardize optimizer definitions.
Copy Keras optimizer definitions.
All optimizers keep track of an iteration count and compute the learning rate as:
`lr = initial_lr * 1 / (1 + decay * step)`.
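
For illustration only, a minimal standalone Swift sketch of that schedule (the `decayedLearningRate` helper and its parameter names are hypothetical, not part of this diff):

```swift
// Inverse time decay: lr = initial_lr * 1 / (1 + decay * step).
// Hypothetical helper for illustration; not part of the diff.
func decayedLearningRate(initialLearningRate: Float, decay: Float, step: Float) -> Float {
    return initialLearningRate * 1 / (1 + decay * step)
}

// With initialLearningRate = 0.1 and decay = 0.01, the rate halves by step 100:
print(decayedLearningRate(initialLearningRate: 0.1, decay: 0.01, step: 1))    // ≈ 0.0990
print(decayedLearningRate(initialLearningRate: 0.1, decay: 0.01, step: 100))  // 0.05
```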
dan-zheng committed Jan 22, 2019
1 parent 3084fa6 commit e1c4921
Showing 1 changed file with 6 additions and 0 deletions: Sources/DeepLearning/Optimizer.swift
@@ -97,10 +97,13 @@ public class RMSProp<Model: Layer, Scalar: BinaryFloatingPoint & TensorFlowScalar
         self.decay = decay
     }
 
+    private var step: Scalar = 0
     private var alpha = Model.AllDifferentiableVariables.zero
 
     public func update(_ model: inout Model.AllDifferentiableVariables,
                        along gradient: Model.CotangentVector) {
+        step += 1
+        let learningRate = self.learningRate * 1 / (1 + decay * step)
         for kp in model.recursivelyAllWritableKeyPaths(to: Tensor<Scalar>.self) {
             alpha[keyPath: kp] =
                 rho * alpha[keyPath: kp] + (1 - rho) * pow(gradient[keyPath: kp], 2)
@@ -134,10 +137,13 @@ public class SGD<Model: Layer, Scalar: BinaryFloatingPoint & TensorFlowScalar>:
         self.nesterov = nesterov
     }
 
+    private var step: Scalar = 0
     private var velocity = Model.AllDifferentiableVariables.zero
 
    public func update(_ model: inout Model.AllDifferentiableVariables,
                       along gradients: Model.CotangentVector) {
+        step += 1
+        let learningRate = self.learningRate * 1 / (1 + decay * step)
         for kp in model.recursivelyAllWritableKeyPaths(to: Tensor<Scalar>.self) {
             velocity[keyPath: kp] =
                 momentum * velocity[keyPath: kp] - learningRate * gradients[keyPath: kp]
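
To make the shared pattern concrete, here is a rough self-contained sketch of an SGD-style update with the same step counter and decay, using plain arrays instead of the library's `AllDifferentiableVariables` (all names here are illustrative, not the Swift for TensorFlow API):

```swift
// Illustrative only: mirrors the diff's pattern (step counter + inverse time
// decay + momentum velocity update) on plain [Float] parameters.
struct SimpleSGD {
    var learningRate: Float
    var decay: Float
    var momentum: Float
    private var step: Float = 0
    private var velocity: [Float]

    init(learningRate: Float, decay: Float, momentum: Float, parameterCount: Int) {
        self.learningRate = learningRate
        self.decay = decay
        self.momentum = momentum
        self.velocity = Array(repeating: 0, count: parameterCount)
    }

    mutating func update(_ parameters: inout [Float], along gradients: [Float]) {
        step += 1
        // Same schedule the commit standardizes on.
        let learningRate = self.learningRate * 1 / (1 + decay * step)
        for i in parameters.indices {
            velocity[i] = momentum * velocity[i] - learningRate * gradients[i]
            parameters[i] += velocity[i]
        }
    }
}
```

Each `update` call both advances the step count and applies the freshly decayed rate, which is what makes the schedule consistent across optimizers.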
