
Commit 2fe6bb9

klamike and blegat authored
Fix ReverseConstraintDual for variable bounds in NonLinearProgram (#332)
* fix NLP bounds
* add test
* Remove redundant test
* Fix format
* get full coverage
* Apply suggestion from @klamike

---------

Co-authored-by: Benoît Legat <benoit.legat@gmail.com>
1 parent 1b3cfe1 commit 2fe6bb9
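
For context, the pattern this commit repairs is seeding a reverse-mode dual on a variable bound of a nonlinear DiffOpt model and reading back the parameter sensitivity. Below is a minimal sketch of that pattern, distilled from the tests added in this commit; the toy model itself is illustrative and not part of the commit, and it assumes JuMP, DiffOpt, and Ipopt are installed.

using JuMP, DiffOpt, Ipopt

model = DiffOpt.nonlinear_diff_model(Ipopt.Optimizer)
set_silent(model)
@variable(model, x[1:2] >= 0)              # x[2] >= 0 is active at the optimum
@variable(model, p in MOI.Parameter(1.0))
@constraint(model, x[1] - x[2] == p)
@objective(model, Min, sum(x .^ 2))
optimize!(model)

# Seed the reverse pass on the dual of x[2]'s lower bound (the code path this commit fixes) ...
MOI.set(model, DiffOpt.ReverseConstraintDual(), LowerBoundRef(x[2]), 1.0)
DiffOpt.reverse_differentiate!(model)
# ... and read the resulting sensitivity of that bound dual with respect to the parameter p.
dp = MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)).value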

File tree

2 files changed: +32 -2 lines changed


src/NonLinearProgram/NonLinearProgram.jl

Lines changed: 2 additions & 2 deletions
@@ -574,13 +574,13 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6)
         end
     end
     for (i, var_idx) in enumerate(cache.primal_vars[cache.has_low])
-        idx = form.constraint_lower_bounds[var_idx.value].value
+        idx = form.constraint_lower_bounds[var_idx.value]
         if haskey(model.input_cache.dy, idx)
             Δdual[num_constraints+i] = model.input_cache.dy[idx]
         end
     end
     for (i, var_idx) in enumerate(cache.primal_vars[cache.has_up])
-        idx = form.constraint_upper_bounds[var_idx.value].value
+        idx = form.constraint_upper_bounds[var_idx.value]
         if haskey(model.input_cache.dy, idx)
             Δdual[num_constraints+num_low+i] = model.input_cache.dy[idx]
         end
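
These two hunks are the whole fix: form.constraint_lower_bounds[var_idx.value] (and its upper-bound twin) already yields the constraint index used as a key into model.input_cache.dy, so taking .value a second time produced a raw integer that never matched, the haskey guard always failed, and any dual seeded on a variable bound was silently dropped. A small standalone illustration of that key-type mismatch follows; it assumes the cache is keyed by MOI.ConstraintIndex, which is an inference from the diff rather than a statement about DiffOpt internals.

import MathOptInterface as MOI

# dy stands in for model.input_cache.dy: reverse dual seeds keyed by constraint index.
dy = Dict{MOI.ConstraintIndex,Float64}()
ci = MOI.ConstraintIndex{MOI.VariableIndex,MOI.GreaterThan{Float64}}(3)
dy[ci] = 1.0           # a seed set via DiffOpt.ReverseConstraintDual()

haskey(dy, ci)         # true:  the fixed lookup finds the seed
haskey(dy, ci.value)   # false: the old lookup used the raw Int64 and never matched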

test/nlp_program.jl

Lines changed: 30 additions & 0 deletions
@@ -977,6 +977,36 @@ function test_changing_factorization()
     )
 end

+function test_reverse_bounds_lower()
+    model = DiffOpt.nonlinear_diff_model(Ipopt.Optimizer)
+    set_silent(model)
+    @variable(model, x[1:3] >= 0) # x[3] ≥ 0 is active
+    @variable(model, p in MOI.Parameter(4.5))
+    @constraint(model, 6x[1] + 3x[2] + 2x[3] == p)
+    @constraint(model, x[1] + x[2] - x[3] == 1)
+    @objective(model, Min, sum(x .^ 2))
+    optimize!(model)
+    MOI.set(model, DiffOpt.ReverseConstraintDual(), LowerBoundRef(x[3]), 1.0)
+    DiffOpt.reverse_differentiate!(model)
+    dp = MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)).value
+    @test isapprox(dp, -2.88888; atol = 1e-4)
+end
+
+function test_reverse_bounds_upper()
+    model = DiffOpt.nonlinear_diff_model(Ipopt.Optimizer)
+    set_silent(model)
+    @variable(model, x[1:3] <= 0) # x[3] ≤ 0 is active
+    @variable(model, p in MOI.Parameter(4.5))
+    @constraint(model, 6x[1] + 3x[2] + 2x[3] == -p)
+    @constraint(model, x[1] + x[2] - x[3] == -1)
+    @objective(model, Min, sum(x .^ 2))
+    optimize!(model)
+    MOI.set(model, DiffOpt.ReverseConstraintDual(), UpperBoundRef(x[3]), 1.0)
+    DiffOpt.reverse_differentiate!(model)
+    dp = MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)).value
+    @test isapprox(dp, 2.88888; atol = 1e-4)
+end
+
 end # module

 TestNLPProgram.runtests()
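
As a sanity check on the -2.88888 target in test_reverse_bounds_lower (test_reverse_bounds_upper is the sign-flipped problem and expects the opposite sign): assuming, as the test's comment says, that only the x[3] ≥ 0 bound is active at the optimum, the reduced KKT system can be solved by hand and gives dμ3/dp = -26/9 ≈ -2.8889. Below is a standalone sketch of that check, independent of DiffOpt.

# With x3 fixed at its active bound (x3 = 0), stationarity of
#     min x1^2 + x2^2   s.t.   6x1 + 3x2 == p,   x1 + x2 == 1
# gives λ1 = 4(p - 4.5)/9 and λ2 = 1 - 2(p - 4.5), and stationarity in x3 gives the
# bound dual μ3 = -2λ1 + λ2, hence dμ3/dp = -8/9 - 2 = -26/9 ≈ -2.8889.
function mu3(p)
    λ1 = 4 * (p - 4.5) / 9
    λ2 = 1 - 2 * (p - 4.5)
    return -2λ1 + λ2
end

dμ3_dp = (mu3(4.5 + 1e-6) - mu3(4.5 - 1e-6)) / 2e-6   # ≈ -2.8889, matching the test target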
