Gradient descent optimizer should find optimal coefficient values and regularize it
g = (8, 8, 8)
c1 = (1 - 2 * η * λ) * c1’ - η * g1 = (1 - 2 * 2 * 10) * 0 - 2 * 8 = 0 - 16 = -16
c2 = (1 - 2 * η * λ) * c2’ - η * g2 = (1 - 2 * 2 * 10) * 0 - 2 * 8 = 0 - 16 = -16
c3 = (1 - 2 * η * λ) * c3’ - η * g3 = (1 - 2 * 2 * 10) * 0 - 2 * 8 = 0 - 16 = -16
c = (-16, -16, -16)
g = (8, 8, 8)
c1 = (1 - 2 * η * λ) * c1’ - η * g1 = (1 - 2 * 2 * 10) * -16 - 2 * 8 = -39 * -16 - 16 = 608
c2 = (1 - 2 * η * λ) * c2’ - η * g2 = (1 - 2 * 2 * 10) * -16 - 2 * 8 = -39 * -16 - 16 = 608
c3 = (1 - 2 * η * λ) * c3’ - η * g3 = (1 - 2 * 2 * 10) * -16 - 2 * 8 = -39 * -16 - 16 = 608
c = (608.0, 608.0, 608.0)
g = (8, 8, 8)
c1 = (1 - 2 * η * λ) * c1’ - η * g1 = (1 - 2 * 2 * 10) * 608 - 2 * 8 = -39 * 608 - 16 = -23728
c2 = (1 - 2 * η * λ) * c2’ - η * g2 = (1 - 2 * 2 * 10) * 608 - 2 * 8 = -39 * 608 - 16 = -23728
c3 = (1 - 2 * η * λ) * c3’ - η * g3 = (1 - 2 * 2 * 10) * 608 - 2 * 8 = -39 * 608 - 16 = -23728
c = (-23728.0, -23728.0, -23728.0)
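Below is a minimal sketch (in Python, purely for illustration) of the regularized update traced above, c_i ← (1 - 2·η·λ)·c_i - η·g_i. The learning rate η = 2, the regularization factor λ = 10, the zero initial coefficients, and the constant gradient (8, 8, 8) at every step are all assumptions read off the worked numbers, not values taken from the optimizer's actual configuration.

```python
# L2-regularized gradient descent step: c_i <- (1 - 2*eta*lam) * c_i - eta * g_i
# eta, lam, the initial coefficients, and the gradient are assumed from the trace above.
eta = 2.0    # learning rate (η), assumed
lam = 10.0   # regularization factor (λ), assumed

def update(c, g):
    """Apply one regularized gradient descent step to every coefficient."""
    return [(1 - 2 * eta * lam) * ci - eta * gi for ci, gi in zip(c, g)]

c = [0.0, 0.0, 0.0]   # assumed starting coefficients
g = [8.0, 8.0, 8.0]   # gradient reported at each step in the trace

for step in range(1, 4):
    c = update(c, g)
    print(f"step {step}: c = {c}")
# step 1: c = [-16.0, -16.0, -16.0]
# step 2: c = [608.0, 608.0, 608.0]
# step 3: c = [-23728.0, -23728.0, -23728.0]
```

Note that with these values |1 - 2ηλ| = 39 > 1, so each step multiplies the previous coefficients by -39; the iterates alternate in sign and grow instead of converging, which is exactly what the trace above shows.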