@@ -8,16 +8,21 @@ struct AutoZygote <: AbstractADType end
 struct AutoFiniteDiff <: AbstractADType end
 struct AutoModelingToolkit <: AbstractADType end
 
-struct OptimizationFunction{F,G,H,HV,K} <: AbstractOptimizationFunction
+struct OptimizationFunction{F,G,H,HV,C,CJ,CH,K} <: AbstractOptimizationFunction
     f::F
     grad::G
     hess::H
     hv::HV
     adtype::AbstractADType
+    cons::C
+    cons_j::CJ
+    cons_h::CH
+    num_cons::Int
     kwargs::K
 end
 
-function OptimizationFunction(f, x, ::AutoForwardDiff; grad=nothing,hess=nothing, p=DiffEqBase.NullParameters(), chunksize = 1, hv = nothing, kwargs...)
+function OptimizationFunction(f, x, ::AutoForwardDiff; grad=nothing, hess=nothing, cons = nothing, cons_j = nothing, cons_h = nothing,
+                              num_cons = 0, p=DiffEqBase.NullParameters(), chunksize = 1, hv = nothing, kwargs...)
     _f = θ -> f(θ,p)[1]
     if grad === nothing
         gradcfg = ForwardDiff.GradientConfig(_f, x, ForwardDiff.Chunk{chunksize}())
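
For orientation, a minimal usage sketch of the new constructor signature above. The call is hypothetical and not part of this diff; the Rosenbrock objective and circle constraint are illustrative. `cons` returns the vector of constraint values and `num_cons` declares how many there are:

    # Hypothetical example: one circle constraint on the Rosenbrock objective.
    rosenbrock(θ, p) = (p[1] - θ[1])^2 + p[2] * (θ[2] - θ[1]^2)^2
    cons = θ -> [θ[1]^2 + θ[2]^2]        # constraint values, returned as a vector
    optf = OptimizationFunction(rosenbrock, zeros(2), AutoForwardDiff();
                                cons = cons, num_cons = 1, p = [1.0, 100.0])
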
@@ -37,10 +42,41 @@ function OptimizationFunction(f, x, ::AutoForwardDiff; grad=nothing,hess=nothing
         end
     end
 
-    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,AutoForwardDiff(),kwargs)
+    if cons !== nothing && cons_j === nothing
+        if num_cons == 1
+            cjconfig = ForwardDiff.JacobianConfig(cons, x, ForwardDiff.Chunk{chunksize}())
+            cons_j = (res,θ) -> ForwardDiff.jacobian!(res, cons, θ, cjconfig)
+        else
+            cons_j = function (res, θ)
+                for i in 1:num_cons
+                    cjconfig = ForwardDiff.JacobianConfig(x -> cons(x)[i], θ, ForwardDiff.Chunk{chunksize}())
+                    ForwardDiff.jacobian!(res[i], x -> cons(x)[i], θ, cjconfig, Val{false}())
+                end
+            end
+        end
+    end
+
+    if cons !== nothing && cons_h === nothing
+        if num_cons == 1
+            cons_h = function (res, θ)
+                hess_config_cache = ForwardDiff.HessianConfig(cons, θ, ForwardDiff.Chunk{chunksize}())
+                ForwardDiff.hessian!(res, cons, θ, hess_config_cache)
+            end
+        else
+            cons_h = function (res, θ)
+                for i in 1:num_cons
+                    hess_config_cache = ForwardDiff.HessianConfig(x -> cons(x)[i], θ, ForwardDiff.Chunk{chunksize}())
+                    ForwardDiff.hessian!(res[i], x -> cons(x)[i], θ, hess_config_cache, Val{false}())
+                end
+            end
+        end
+    end
+
+    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,AutoForwardDiff(),cons,cons_j,cons_h,num_cons,kwargs)
 end
 
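To see what the generated `cons_j`/`cons_h` closures compute, the branches above can be replayed directly with ForwardDiff. A standalone sketch (buffer shapes inferred from the `jacobian!`/`hessian!` calls; the scalar slice `c1` mirrors the `x -> cons(x)[i]` closures of the multi-constraint branch):

    using ForwardDiff
    cons = θ -> [θ[1]^2 + θ[2]^2]              # one constraint, as a vector
    θ = [0.5, 0.5]

    J = zeros(1, 2)                            # num_cons × length(θ) Jacobian buffer
    cfg = ForwardDiff.JacobianConfig(cons, θ, ForwardDiff.Chunk{1}())
    ForwardDiff.jacobian!(J, cons, θ, cfg)     # J == [1.0 1.0]

    c1 = θ -> cons(θ)[1]                       # scalar slice, as in the i-th constraint loop
    H = zeros(2, 2)
    hcfg = ForwardDiff.HessianConfig(c1, θ, ForwardDiff.Chunk{1}())
    ForwardDiff.hessian!(H, c1, θ, hcfg)       # H == [2.0 0.0; 0.0 2.0]
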
-function OptimizationFunction(f, x, ::AutoZygote; grad=nothing, hess=nothing, p=DiffEqBase.NullParameters(), hv = nothing, kwargs...)
+function OptimizationFunction(f, x, ::AutoZygote; grad=nothing, hess=nothing, cons = nothing, cons_j = nothing, cons_h = nothing,
+                              num_cons = 0, p=DiffEqBase.NullParameters(), hv = nothing, kwargs...)
     _f = θ -> f(θ,p)[1]
     if grad === nothing
         grad = (res,θ) -> res isa DiffResults.DiffResult ? DiffResults.gradient!(res, Zygote.gradient(_f, θ)[1]) : res .= Zygote.gradient(_f, θ)[1]
@@ -68,10 +104,11 @@ function OptimizationFunction(f, x, ::AutoZygote; grad=nothing, hess=nothing, p=
             H .= getindex.(ForwardDiff.partials.(DiffResults.gradient(res)),1)
         end
     end
-    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,AutoZygote(),kwargs)
+    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,AutoZygote(),cons,cons_j,cons_h,num_cons,kwargs)
 end
 
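The `ForwardDiff.partials` context line above is the forward-over-reverse trick: Zygote computes the gradient, and ForwardDiff dual numbers carry the second-order information through it. The same idea in its simplest standalone form (illustrative function, not this file's exact implementation):

    using ForwardDiff, Zygote
    f(θ) = θ[1]^2 * θ[2]
    θ = [1.0, 2.0]
    # Hessian as the ForwardDiff Jacobian of the Zygote gradient.
    H = ForwardDiff.jacobian(x -> Zygote.gradient(f, x)[1], θ)   # [4.0 2.0; 2.0 0.0]
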
-function OptimizationFunction(f, x, ::AutoReverseDiff; grad=nothing,hess=nothing, p=DiffEqBase.NullParameters(), hv = nothing, kwargs...)
+function OptimizationFunction(f, x, ::AutoReverseDiff; grad=nothing,hess=nothing, cons = nothing, cons_j = nothing, cons_h = nothing,
+                              num_cons = 0, p=DiffEqBase.NullParameters(), hv = nothing, kwargs...)
     _f = θ -> f(θ,p)[1]
     if grad === nothing
         grad = (res,θ) -> ReverseDiff.gradient!(res, _f, θ, ReverseDiff.GradientConfig(θ))
@@ -100,11 +137,12 @@ function OptimizationFunction(f, x, ::AutoReverseDiff; grad=nothing,hess=nothing
         end
     end
 
-    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,AutoReverseDiff(),kwargs)
+    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,AutoReverseDiff(),cons,cons_j,cons_h,num_cons,kwargs)
 end
 
 
-function OptimizationFunction(f, x, ::AutoTracker; grad=nothing,hess=nothing, p=DiffEqBase.NullParameters(), hv = nothing, kwargs...)
+function OptimizationFunction(f, x, ::AutoTracker; grad=nothing,hess=nothing, cons = nothing, cons_j = nothing, cons_h = nothing,
+                              num_cons = 0, p=DiffEqBase.NullParameters(), hv = nothing, kwargs...)
     _f = θ -> f(θ,p)[1]
     if grad === nothing
         grad = (res,θ) -> res isa DiffResults.DiffResult ? DiffResults.gradient!(res, Tracker.data(Tracker.gradient(_f, θ)[1])) : res .= Tracker.data(Tracker.gradient(_f, θ)[1])
@@ -119,10 +157,11 @@ function OptimizationFunction(f, x, ::AutoTracker; grad=nothing,hess=nothing, p=
     end
 
 
-    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,AutoTracker(),kwargs)
+    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,AutoTracker(),cons,cons_j,cons_h,num_cons,kwargs)
 end
 
-function OptimizationFunction(f, x, adtype::AutoFiniteDiff; grad=nothing,hess=nothing, p=DiffEqBase.NullParameters(), hv = nothing, fdtype = :forward, fdhtype = :hcentral, kwargs...)
+function OptimizationFunction(f, x, adtype::AutoFiniteDiff; grad=nothing,hess=nothing, cons = nothing, cons_j = nothing, cons_h = nothing,
+                              num_cons = 0, p=DiffEqBase.NullParameters(), hv = nothing, fdtype = :forward, fdhtype = :hcentral, kwargs...)
     _f = θ -> f(θ,p)[1]
     if grad === nothing
         grad = (res,θ) -> FiniteDiff.finite_difference_gradient!(res, _f, θ, FiniteDiff.GradientCache(res, x, Val{fdtype}))
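
The `FiniteDiff.GradientCache` context line above preallocates the finite-difference work buffers once and reuses them on every call. The same pattern standalone (illustrative function; the `Val{:forward}` type form mirrors the `Val{fdtype}` usage above):

    using FiniteDiff
    g(θ) = sum(abs2, θ)
    θ = [1.0, 2.0]
    res = zeros(2)
    cache = FiniteDiff.GradientCache(res, θ, Val{:forward})
    FiniteDiff.finite_difference_gradient!(res, g, θ, cache)     # res ≈ [2.0, 4.0]
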
@@ -140,5 +179,5 @@ function OptimizationFunction(f, x, adtype::AutoFiniteDiff; grad=nothing,hess=no
         end
     end
 
-    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,adtype,kwargs)
+    return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,adtype,cons,cons_j,cons_h,num_cons,kwargs)
 end
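
Note that only the `AutoForwardDiff` method generates `cons_j`/`cons_h` automatically; the other backends simply store whatever is passed in. So with, say, `AutoZygote`, constraint derivatives would be supplied by hand, sketched here with a hypothetical hand-written Jacobian (reusing `rosenbrock` and `cons` from the first sketch):

    cons_j = (res, θ) -> (res .= [2θ[1]  2θ[2]])     # hand-written 1×2 constraint Jacobian
    optf = OptimizationFunction(rosenbrock, zeros(2), AutoZygote();
                                cons = cons, cons_j = cons_j, num_cons = 1)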