diff --git a/Project.toml b/Project.toml
index a987f1f..12b00e7 100644
--- a/Project.toml
+++ b/Project.toml
@@ -3,6 +3,14 @@ uuid = "2aa259a0-4d4c-11e9-0909-ad9b1ef308f5"
 authors = ["florian oswald "]
 version = "0.1.0"
 
+[deps]
+DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
+ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
+Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
+JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
+LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
+
 [extras]
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
diff --git a/src/HWconstrained.jl b/src/HWconstrained.jl
index 3b6faa4..7ddc102 100644
--- a/src/HWconstrained.jl
+++ b/src/HWconstrained.jl
@@ -5,25 +5,32 @@ greet() = print("Hello World!")
     using JuMP, NLopt, DataFrames, Ipopt
     using LinearAlgebra
 
-    export data, table_NLopt, table_JuMP
+    export data, table_NLopt, table_JuMP, obj, constr, max_NLopt, max_JuMP
 
     function data(a=0.5)
-
-
-
-
-
-
-
-
-
-        return Dict("a"=>a,"na"=>na,"nc"=>nc,"ns"=>ns,"nss"=>nss,"e"=>e,"p"=>p,"z"=>z,"pi"=>pi)
+        n = 3                                      # number of assets
+        p = [1, 1, 1]                              # asset prices
+        e = [2, 0, 0]                              # endowments
+        ns = 4                                     # return states per risky asset
+        nss = ns^2                                 # number of joint states
+        z1 = [0.72, 0.92, 1.12, 1.32]              # returns of asset 2
+        z2 = [0.86, 0.96, 1.06, 1.16]              # returns of asset 3
+        za = [[1, i, j] for i in z1 for j in z2]   # asset 1 is riskless
+        z = vcat(za'...)                           # nss-by-3 return matrix
+        pi = repeat([1/nss], nss)                  # all states equally likely
+        return Dict("a"=>a,"ns"=>ns,"nss"=>nss,"e"=>e,"p"=>p,"z"=>z,"pi"=>pi, "za"=>za)
     end
 
     function max_JuMP(a=0.5)
-
-        return Dict("obj"=>objective_value(m),"c"=>value(c),"omegas"=>[value(omega[i]) for i in 1:length(omega)])
+        d = data(a)
+        m = Model(with_optimizer(Ipopt.Optimizer))
+        @variable(m, c >= 0)                       # consumption
+        @variable(m, x[1:3])                       # portfolio omega
+        @NLconstraint(m, c + sum(d["p"][i] * (x[i] - d["e"][i]) for i in 1:3) == 0)
+        @NLobjective(m, Max, -exp(-a * c) + sum(-exp(-a * sum(d["za"][j][i] * x[i] for i in 1:3)) * d["pi"][j] for j in 1:16))
+        JuMP.optimize!(m)
+        return Dict("obj"=>objective_value(m),"c"=>value(c),"omegas"=>[value(x[i]) for i in 1:length(x)])
     end
 
     function table_JuMP()
@@ -39,66 +46,34 @@ greet() = print("Hello World!")
         return d
     end
 
-
-
-
-
-
-
-
     function obj(x::Vector,grad::Vector,data::Dict)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        a = data["a"]
+        z = data["z"]
+        u(x) = -exp(-a * x)                        # CARA utility
+        up(x) = a * exp(-a * x)                    # marginal utility u'
+        if length(grad) > 0                        # fill gradient in place for NLopt
+            grad[1] = up(x[1])
+            grad[2:end] = sum(data["pi"] .* z .* up.(z * x[2:end]), dims=1)
+        end
+        return u(x[1]) + sum(data["pi"] .* u.(z * x[2:end]))
     end
 
     function constr(x::Vector,grad::Vector,data::Dict)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if length(grad) > 0
+            grad[1] = 1
+            grad[2:end] = data["p"]
+        end
+        return x[1] + sum(data["p"] .* (x[2:end] - data["e"]))   # c + p'(omega - e) <= 0
    end
 
     function max_NLopt(a=0.5)
-
-
-
-
-
-
-
-
+        d = data(a)
+        opt = Opt(:LD_MMA, 4)                            # gradient-based algorithm, 4 variables
+        lower_bounds!(opt, [0., -Inf, -Inf, -Inf])       # consumption must be nonnegative
+        max_objective!(opt, (x, g)->obj(x, g, d))
+        inequality_constraint!(opt, (x, g)->constr(x, g, d))
+        ftol_rel!(opt, 1e-9)
+        NLopt.optimize(opt, vcat(0, d["e"]))             # start at c=0, omega=e
     end
 
     function table_NLopt()
diff --git a/test/runtests.jl b/test/runtests.jl
index 1793ffc..e38950a 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -1,26 +1,25 @@
 using HWconstrained
-using Test
+using Test, ForwardDiff
 
 @testset "HWconstrained.jl" begin
     @testset "testing components" begin
+        d = data()
+        u(x) = -exp(-d["a"] * x)
+        grad = zeros(4)
         @testset "tests gradient of objective function" begin
-
-
-
-
-
-
+            objfun(x) = u(x[1]) + sum(d["pi"] .* u.(d["z"] * x[2:end]))
+            t = ForwardDiff.gradient(objfun, ones(4))
+            obj(ones(4), grad, d)
+            @test grad ≈ t
        end
 
        @testset "tests gradient of constraint function" begin
-
-
-
-
-
-
+            consfun(x) = x[1] + sum(d["p"] .* (x[2:end] - d["e"]))
+            t = ForwardDiff.gradient(consfun, ones(4))
+            constr(ones(4), grad, d)
+            @test grad ≈ t
        end
    end
 
@@ -32,6 +31,7 @@ using Test
        omega2=[0.801455;0.400728;0.0801455],
        omega3=[1.60291;0.801455;0.160291],
        fval=[-1.20821;-0.732819;-0.013422])
+    tol2 = 1e-2
 
    @testset "checking result of NLopt maximization" begin