You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Support Interop between DiffSharp and MathNet and FSharp Stats For optimization Problems
Issue #382 | Created by @AndyAbok | 2021-09-16 15:49:47 UTC |
It would be helpful if DiffSharp could support interop with MathNet, mostly for the optimization module, so as to take advantage of its automatic differentiation capability in optimization problems.
#r "nuget: DiffSharp-lite,1.0.0-preview-781810429"
#r "nuget: MathNet.Numerics.FSharp"
open System
open MathNet.Numerics
open MathNet.Numerics.Optimization
open MathNet.Numerics.LinearAlgebra
open DiffSharp
/// Rosenbrock test function f(x, y) = (1 - x)^2 + 100 * (y - x^2)^2,
/// evaluated on a two-element MathNet vector [x; y].
let rosenbrockFunction (xs: Vector<float>) =
    // squaring via multiplication keeps this identical to `pown _ 2`
    let sq (v: float) = v * v
    let x = xs.[0]
    let y = xs.[1]
    sq (1.0 - x) + 100.0 * sq (y - x * x)
/// Gradient of the Rosenbrock function for MathNet's optimizer, computed
/// with DiffSharp automatic differentiation instead of a hand-derived formula.
/// Takes a MathNet vector, returns the gradient as a MathNet vector.
let gradient(x:Vector<float>) =
    // Redefine this function to consume a DiffSharp Tensor so that
    // dsharp.grad can differentiate it.
    let rosenbrockGrad (x:Tensor) =
        // NOTE: the inner `x, y` shadow the Tensor parameter `x`;
        // indexing a Tensor here yields scalar tensors.
        let x, y = x.[0], x.[1]
        pown (1.0 - x) 2 + 100.0 * pown (y - pown x 2) 2
    // Lift the incoming MathNet vector into a DiffSharp tensor.
    let toTensor = dsharp.tensor(x)
    // Evaluate the gradient of the objective at the given point.
    let diffObjectiveFun = dsharp.grad rosenbrockGrad toTensor
    // dsharp.grad outputs a Tensor; convert it back to the vector form
    // MathNet needs — assumes the Tensor enumerates as floats for the
    // `vector` builder (TODO confirm against DiffSharp's Tensor API).
    diffObjectiveFun |> vector
/// Minimizes objective `f` (with gradient `grad`) starting from `initialVal`
/// using MathNet's limited-memory BFGS solver; returns the minimizing point
/// as a sequence of floats.
let LBFGS f grad initialVal =
    // Wrap the F# functions as the System.Func delegates MathNet expects,
    // and bundle them into a gradient-aware objective.
    let objective =
        ObjectiveFunction.Gradient(
            System.Func<Vector<float>, float>(f),
            System.Func<Vector<float>, Vector<float>>(grad))
    // Solver configured with 1e-5 tolerances, memory 5, and at most 1000
    // iterations — NOTE(review): confirm the positional parameter meanings
    // against the MathNet LimitedMemoryBfgsMinimizer constructor docs.
    let minimizer = LimitedMemoryBfgsMinimizer(1e-5, 1e-5, 1e-5, 5, 1000)
    let solution = minimizer.FindMinimum(objective, initialVal)
    solution.MinimizingPoint |> Vector.toSeq
// Start from the origin and run the L-BFGS minimizer; the Rosenbrock
// global minimum is at (1, 1).
// Fix: the original line ended with a stray ''' (markdown code-fence
// residue from the paste) which made the script a syntax error.
let initialVal = [|0.0;0.0|] |> vector
let res = LBFGS rosenbrockFunction gradient initialVal
I think this would apply to FSharp.Stats as well, since it also uses vectors in these kinds of problems.
Support Interop between DiffSharp and MathNet and FSharp Stats For optimization Problems
Issue #382 | Created by @AndyAbok | 2021-09-16 15:49:47 UTC |
It would be helpful if DiffSharp could support interop with MathNet, mostly for the optimization module, so as to take advantage of its automatic differentiation capability in optimization problems.
I think this would apply to Fsharp stats as well, as it also does use vectors in such kind of problems.
Comment by @gbaydin | 2021-09-16 19:53:49 UTC
Hi @AndyAbok thanks for sharing this idea with some example code. Really helpful to understand how this type of scenario can work.
The text was updated successfully, but these errors were encountered: