[WIP] POI + DiffOpt = S2 #143

Open · wants to merge 13 commits into base: master
Changes from 1 commit
1 change: 1 addition & 0 deletions Project.toml
@@ -5,6 +5,7 @@ version = "0.7.0"

[deps]
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
DiffOpt = "930fe3bc-9c6b-11ea-2d94-6184641e85e7"

[compat]
GLPK = "1"
6 changes: 5 additions & 1 deletion src/MOI_wrapper.jl
@@ -99,7 +99,9 @@ function MOI.is_empty(model::Optimizer)
#
isempty(model.multiplicative_parameters) &&
isempty(model.dual_value_of_parameters) &&
model.number_of_parameters_in_model == 0
model.number_of_parameters_in_model == 0 &&
isempty(model.parameter_input_forward) &&
isempty(model.parameter_output_backward)
end

function MOI.empty!(model::Optimizer{T}) where {T}
@@ -133,6 +135,8 @@ function MOI.empty!(model::Optimizer{T}) where {T}
empty!(model.dual_value_of_parameters)
#
model.number_of_parameters_in_model = 0
empty!(model.parameter_input_forward)
empty!(model.parameter_output_backward)
return
end

7 changes: 7 additions & 0 deletions src/ParametricOptInterface.jl
@@ -162,6 +162,10 @@ mutable struct Optimizer{T,OT<:MOI.ModelLike} <: MOI.AbstractOptimizer
number_of_parameters_in_model::Int64
constraints_interpretation::ConstraintsInterpretationCode
save_original_objective_and_constraints::Bool

# sensitivity data
parameter_input_forward::Dict{ParameterIndex,T}
parameter_output_backward::Dict{ParameterIndex,T}
function Optimizer(
optimizer::OT;
evaluate_duals::Bool = true,
@@ -219,6 +223,8 @@ mutable struct Optimizer{T,OT<:MOI.ModelLike} <: MOI.AbstractOptimizer
0,
ONLY_CONSTRAINTS,
save_original_objective_and_constraints,
Dict{ParameterIndex,T}(),
Dict{ParameterIndex,T}(),
)
end
end
@@ -248,5 +254,6 @@ end
include("duals.jl")
include("update_parameters.jl")
include("MOI_wrapper.jl")
include("diff.jl")

end # module
228 changes: 228 additions & 0 deletions src/diff.jl
@@ -0,0 +1,228 @@
using DiffOpt

# forward mode

function DiffOpt.forward_differentiate!(model::Optimizer{T}) where {T}
# TODO: add a reset option
for (F, S) in keys(model.affine_constraint_cache.dict)
affine_constraint_cache_inner = model.affine_constraint_cache[F, S]
if !isempty(affine_constraint_cache_inner)
# TODO add: barrier to avoid type instability of inner dicts
for (inner_ci, pf) in affine_constraint_cache_inner
cte = zero(T)
terms = MOI.ScalarAffineTerm{T}[]
sizehint!(terms, 0)
if length(pf.p) != 0
for term in pf.p
p = p_idx(term.variable)
sensitivity = get(model.parameter_input_forward, p, 0.0)
# TODO: check sign
cte += sensitivity * term.coefficient
end
# TODO: if cte != 0
MOI.set(
model.optimizer,
DiffOpt.ForwardConstraintFunction(),
inner_ci,
MOI.ScalarAffineFunction{T}(terms, cte),
)
end
end
end
end
for (F, S) in keys(model.vector_affine_constraint_cache.dict)
vector_affine_constraint_cache_inner =

model.vector_affine_constraint_cache[F, S]
if !isempty(vector_affine_constraint_cache_inner)

# barrier to avoid type instability of inner dicts
for (inner_ci, pf) in vector_affine_constraint_cache_inner
cte = zeros(T, length(pf.c))
terms = MOI.VectorAffineTerm{T}[]
sizehint!(terms, 0)
if length(pf.p) != 0
for term in pf.p
p = p_idx(term.scalar_term.variable)
sensitivity = get(model.parameter_input_forward, p, 0.0)

# TODO: check sign
cte[term.output_index] += sensitivity * term.scalar_term.coefficient

end
# TODO: if cte != 0
MOI.set(

model.optimizer,
DiffOpt.ForwardConstraintFunction(),
inner_ci,
MOI.VectorAffineFunction{T}(terms, cte),
)
end
end
end
end
for (F, S) in keys(model.quadratic_constraint_cache.dict)
quadratic_constraint_cache_inner =
model.quadratic_constraint_cache[F, S]
if !isempty(quadratic_constraint_cache_inner)
# TODO add: barrier to avoid type instability of inner dicts
for (inner_ci, pf) in quadratic_constraint_cache_inner
cte = zero(T)
terms = MOI.ScalarAffineTerm{T}[]
# terms_dict = Dict{MOI.VariableIndex,T}() # canonicalize?
sizehint!(terms, length(pf.pv))
if length(pf.p) != 0 || length(pf.pv) != 0 || length(pf.pp) != 0
for term in pf.p
p = p_idx(term.variable)
sensitivity = get(model.parameter_input_forward, p, 0.0)
# TODO: check sign
cte += sensitivity * term.coefficient
end
for term in pf.pp
p_1 = p_idx(term.variable_1)
p_2 = p_idx(term.variable_2)
sensitivity_1 = get(model.parameter_input_forward, p_1, 0.0)
sensitivity_2 = get(model.parameter_input_forward, p_2, 0.0)
cte += sensitivity_1 * sensitivity_2 * term.coefficient

end
# canonicalize?
for term in pf.pv
p = p_idx(term.variable_1)
sensitivity = get(model.parameter_input_forward, p, NaN)
if !isnan(sensitivity)
push!(
terms,
MOI.ScalarAffineTerm{T}(
sensitivity * term.coefficient,
term.variable_2,
),
)
end
end
MOI.set(
model.optimizer,
DiffOpt.ForwardConstraintFunction(),
inner_ci,
MOI.ScalarAffineFunction{T}(terms, cte),
)
end
end
end
end
if model.affine_objective_cache !== nothing
cte = zero(T)
terms = MOI.ScalarAffineTerm{T}[]
pf = model.affine_objective_cache
sizehint!(terms, 0)
if length(pf.p) != 0
for term in pf.p
p = p_idx(term.variable)
sensitivity = get(model.parameter_input_forward, p, 0.0)

# TODO: check sign
cte += sensitivity * term.coefficient

end
# TODO: if cte != 0
MOI.set(

model.optimizer,
DiffOpt.ForwardObjectiveFunction(),
MOI.ScalarAffineFunction{T}(terms, cte),
)
end
elseif model.quadratic_objective_cache !== nothing
cte = zero(T)
terms = MOI.ScalarAffineTerm{T}[]
pf = model.quadratic_objective_cache
sizehint!(terms, length(pf.pv))
if length(pf.p) != 0
for term in pf.p
p = p_idx(term.variable)
sensitivity = get(model.parameter_input_forward, p, 0.0)

# TODO: check sign
cte += sensitivity * term.coefficient

end
for term in pf.pp
p_1 = p_idx(term.variable_1)
p_2 = p_idx(term.variable_2)
sensitivity_1 = get(model.parameter_input_forward, p_1, 0.0)
sensitivity_2 = get(model.parameter_input_forward, p_2, 0.0)
cte += sensitivity_1 * sensitivity_2 * term.coefficient

end
# canonicalize?
for term in pf.pv
p = p_idx(term.variable_1)
sensitivity = get(model.parameter_input_forward, p, NaN)
if !isnan(sensitivity)
push!(

terms,
MOI.ScalarAffineTerm{T}(
sensitivity * term.coefficient,
term.variable_2,
),
)
end
end
# TODO: if cte != 0
MOI.set(

model.optimizer,
DiffOpt.ForwardObjectiveFunction(),
MOI.ScalarAffineFunction{T}(terms, cte),
)
end
end
DiffOpt.forward_differentiate!(model.optimizer)
return
end

struct ForwardParameter <: MOI.AbstractVariableAttribute end

Member:
Why have a different attribute? We could just use ForwardVariablePrimal for parameters as well.

Member Author:
We can consider that. On the other hand, ForwardVariablePrimal is for output sensitivity, while ForwardParameter is for input sensitivity. Keeping them distinct would be good for validation, since we gave up on names like ForwardOutputVariablePrimal.

Member:
You can distinguish between input and output depending on whether it's a set or a get. Note that defining a new struct or a new function isn't so natural for an extension; extensions are designed more for adding methods to existing ones. However, it is possible, see the hack in NLopt.

Member Author:
Adding more details to my previous comment, I find it strange that:
For parameters: set ForwardVariablePrimal sets an input value that can then be get to check which value was there.
For actual variables: set ForwardVariablePrimal always errors, and get ForwardVariablePrimal only makes sense after forward_differentiate!.
This was the main motivation for the new attribute.

About:

Note that defining a new struct or a new function isn't so natural for an extension, it's more designed to add methods for existing ones.

I think I did not understand completely. DiffOpt adds new structs. Also, a few solvers define new structs (like Gurobi.NumberOfObjectives, GLPK.CallbackFunction, COSMO.ADMMIterations).

Member Author:
Ah, now I get it: you mean Julia extensions like NLoptMathOptInterfaceExt.jl.

Member (@blegat, Dec 5, 2023):
Yes, I meant Julia extensions :) The workaround in NLopt works quite well but it becomes tricky when you try to include their docstring in the documentation. It was quite hard to make it work in JuliaManifolds/Manopt.jl#264 for instance.
Also, since at the MOI level these are also variables and the set can be bridged to EqualTo, it makes sense to consider it as ForwardVariablePrimal.

Actually, what we could do is add ForwardConstraintSet, defined for MOI.Parameter, MOI.EqualTo, MOI.LessThan, MOI.GreaterThan, and MOI.Interval. I think we worked around it in DiffOpt by using the constant in the function, but if you have a VariableIndex-in-S then you can't modify the function, right?
We could disallow ForwardConstraintSet for non-VariableIndex to avoid having two ways to set the same thing. Even if that's not consistent with the ConstraintFunction/ConstraintSet attributes, that's backward compatible. Or we can change this and tag v0.5 of DiffOpt.

The advantage of this design is that we can implement ForwardConstraintSet in the bridge that transforms Parameter to EqualTo so that the same user code works with both a POI-based solver and a solver using the bridge.
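
A minimal sketch of what the proposed user-facing code could look like, assuming a hypothetical DiffOpt.ForwardConstraintSet attribute defined for MOI.Parameter (it does not exist in DiffOpt at the time of this discussion), with cp the VariableIndex-in-Parameter constraint index and x a decision variable:

# Hypothetical attribute from the proposal above, not an existing DiffOpt API.
# cp was returned by: p, cp = MOI.add_constrained_variable(model, MOI.Parameter(1.0))
MOI.set(model, DiffOpt.ForwardConstraintSet(), cp, MOI.Parameter(1.0))  # perturb the parameter by 1
DiffOpt.forward_differentiate!(model)
dx = MOI.get(model, DiffOpt.ForwardVariablePrimal(), x)  # sensitivity of x to that perturbation

Under this design, the same code would work whether the parameter is handled natively by a POI-based solver or bridged to EqualTo.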


function MOI.set(
model::Optimizer,
::ForwardParameter,
variable::MOI.VariableIndex,
value::Number,
)
if _is_variable(variable)
error("Trying to set a parameter sensitivity for a variable")

end
parameter = p_idx(variable)
model.parameter_input_forward[parameter] = value
return
end

function MOI.get(
model::Optimizer,
attr::DiffOpt.ForwardVariablePrimal,
variable::MOI.VariableIndex,
)
if _is_parameter(variable)
error("Trying to get a variable sensitivity for a parameter")

end
return MOI.get(model.optimizer, attr, model.variables[variable])
end
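
Taken together, the forward-mode pieces above suggest a workflow like the following. This is a minimal sketch, not code from the PR: the GLPK inner solver, the variable and parameter names, and the exact parameter-declaration call are illustrative assumptions.

using MathOptInterface, DiffOpt, GLPK
import ParametricOptInterface as POI
const MOI = MathOptInterface

# POI wrapping a DiffOpt-differentiable inner optimizer (assumed setup)
model = POI.Optimizer(DiffOpt.diff_optimizer(GLPK.Optimizer))
x = MOI.add_variable(model)
p, _ = MOI.add_constrained_variable(model, MOI.Parameter(1.0))
# ... add constraints and an objective mixing x and p, then solve ...
MOI.optimize!(model)

# input sensitivity on the parameter, forward sweep, output sensitivity on x
MOI.set(model, POI.ForwardParameter(), p, 1.0)
DiffOpt.forward_differentiate!(model)
dx_dp = MOI.get(model, DiffOpt.ForwardVariablePrimal(), x)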

# reverse mode

function DiffOpt.reverse_differentiate!(model::Optimizer)
error("Not implemented")
DiffOpt.reverse_differentiate!(model.optimizer)
return

end

function MOI.set(

model::Optimizer,
attr::DiffOpt.ReverseVariablePrimal,
variable::MOI.VariableIndex,
value::Number,
)
MOI.set(model.optimizer, attr, model.variables[variable], value)
return

end

struct ReverseParameter <: MOI.AbstractVariableAttribute end

function MOI.get(

model::Optimizer,
attr::ReverseParameter,
variable::MOI.VariableIndex,
)
error("Not implemented")
return

end
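
The reverse-mode entry points above are still stubs. For reference, the intended usage, mirroring DiffOpt's reverse pass, would presumably look like the sketch below; these calls error in the current state of the PR, and the names x and p are illustrative.

# seed the output sensitivity on a decision variable
MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1.0)
# pull it back through the solution map (not implemented yet in this PR)
DiffOpt.reverse_differentiate!(model)
# read the resulting sensitivity on a parameter
dp = MOI.get(model, POI.ReverseParameter(), p)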