34 changes: 34 additions & 0 deletions src/diff_opt.jl
@@ -314,6 +314,40 @@ the differentiation information.
"""
struct DifferentiateTimeSec <: MOI.AbstractModelAttribute end

"""
BackwardDifferentiate <: MOI.AbstractOptimizerAttribute

An `MOI.AbstractOptimizerAttribute` that triggers backward differentiation
on the solver. If `MOI.supports(optimizer, DiffOpt.BackwardDifferentiate())`
returns `true`, then the solver natively supports backward differentiation
through the DiffOpt attribute interface, and DiffOpt will delegate
differentiation directly to the solver instead of using its own
differentiation backend.

Trigger the computation with:
```julia
MOI.set(optimizer, DiffOpt.BackwardDifferentiate(), nothing)
```
"""
struct BackwardDifferentiate <: MOI.AbstractOptimizerAttribute end

"""
ForwardDifferentiate <: MOI.AbstractOptimizerAttribute

An `MOI.AbstractOptimizerAttribute` that triggers forward differentiation
on the solver. If `MOI.supports(optimizer, DiffOpt.ForwardDifferentiate())`
returns `true`, then the solver natively supports forward differentiation
through the DiffOpt attribute interface, and DiffOpt will delegate
differentiation directly to the solver instead of using its own
differentiation backend.

Trigger the computation with:
```julia
MOI.set(optimizer, DiffOpt.ForwardDifferentiate(), nothing)
```
"""
struct ForwardDifferentiate <: MOI.AbstractOptimizerAttribute end

MOI.attribute_value_type(::DifferentiateTimeSec) = Float64

MOI.is_set_by_optimize(::DifferentiateTimeSec) = true
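These two attributes define the opt-in protocol: a solver advertises support through `MOI.supports` and runs its own differentiation pass when the attribute is set to `nothing`. Below is a minimal sketch of the solver side; `MockDiffSolver` and its (empty) differentiation pass are hypothetical and only illustrate the shape of the methods a native solver would define.

```julia
import MathOptInterface as MOI
import DiffOpt

# Hypothetical optimizer used only to illustrate the opt-in protocol.
struct MockDiffSolver <: MOI.AbstractOptimizer end

# Advertising support makes DiffOpt delegate instead of building its own backend.
MOI.supports(::MockDiffSolver, ::DiffOpt.ForwardDifferentiate) = true
MOI.supports(::MockDiffSolver, ::DiffOpt.BackwardDifferentiate) = true

# Setting the attribute (with value `nothing`) triggers the solver's own pass.
function MOI.set(::MockDiffSolver, ::DiffOpt.ForwardDifferentiate, ::Nothing)
    # A real solver would compute forward sensitivities here from the
    # `DiffOpt.ForwardConstraintSet` / `ForwardConstraintFunction` inputs it
    # has already received.
    return
end
```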
14 changes: 14 additions & 0 deletions src/jump_moi_overloads.jl
@@ -251,6 +251,20 @@ end

MOI.constant(func::IndexMappedFunction) = MOI.constant(func.func)

# Support JuMP.coefficient on plain MOI functions returned by native solvers
function JuMP.coefficient(
func::MOI.ScalarAffineFunction{T},
vi::MOI.VariableIndex,
) where {T}
coef = zero(T)
for term in func.terms
if term.variable == vi
coef += term.coefficient
end
end
return coef
end

function JuMP.coefficient(func::IndexMappedFunction, vi::MOI.VariableIndex)
return JuMP.coefficient(func.func, func.index_map[vi])
end
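The new `JuMP.coefficient` method walks the term list and sums every coefficient attached to the requested variable, so functions with duplicate terms are handled correctly. A small self-contained sketch of what it computes (the values are illustrative; the method itself is the one added above and is available once DiffOpt is loaded):

```julia
import JuMP
import MathOptInterface as MOI
import DiffOpt  # provides the `JuMP.coefficient` method defined above

x = MOI.VariableIndex(1)
y = MOI.VariableIndex(2)

# f(x, y) = 2x + 3y + 4x + 1; MOI allows duplicate terms for the same variable.
f = MOI.ScalarAffineFunction(
    [
        MOI.ScalarAffineTerm(2.0, x),
        MOI.ScalarAffineTerm(3.0, y),
        MOI.ScalarAffineTerm(4.0, x),
    ],
    1.0,
)

JuMP.coefficient(f, x)  # 6.0 (the duplicate terms for x are summed)
JuMP.coefficient(f, y)  # 3.0
```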
90 changes: 89 additions & 1 deletion src/moi_wrapper.jl
@@ -562,6 +562,22 @@ function reverse_differentiate!(model::Optimizer)
"Set `DiffOpt.AllowObjectiveAndSolutionInput()` to `true` to silence this warning."
end
end
if MOI.supports(model.optimizer, BackwardDifferentiate())
# Solver natively supports backward differentiation.
# Copy input_cache directly into model.optimizer and trigger differentiation.
opt = model.optimizer
for (vi, value) in model.input_cache.dx
MOI.set(opt, ReverseVariablePrimal(), vi, value)
end
for (ci, value) in model.input_cache.dy
MOI.set(opt, ReverseConstraintDual(), ci, value)
end
if !iszero(model.input_cache.dobj)
MOI.set(opt, ReverseObjectiveSensitivity(), model.input_cache.dobj)
end
MOI.set(opt, BackwardDifferentiate(), nothing)
return
end
diff = _diff(model)
MOI.set(
diff,
@@ -673,6 +689,38 @@ function forward_differentiate!(model::Optimizer)
"Trying to compute the forward differentiation on a model with termination status $(st)",
)
end
if MOI.supports(model.optimizer, ForwardDifferentiate())
# Solver natively supports forward differentiation.
# Copy input_cache directly into model.optimizer and trigger differentiation.
opt = model.optimizer
for (ci, value) in model.input_cache.parameter_constraints
MOI.set(opt, ForwardConstraintSet(), ci, MOI.Parameter(value))
end
if model.input_cache.objective !== nothing
MOI.set(
opt,
ForwardObjectiveFunction(),
model.input_cache.objective,
)
end
for (F, S) in MOI.Utilities.DoubleDicts.nonempty_outer_keys(
model.input_cache.scalar_constraints,
)
for (index, value) in model.input_cache.scalar_constraints[F, S]
MOI.set(opt, ForwardConstraintFunction(), index, value)
end
end
for (F, S) in MOI.Utilities.DoubleDicts.nonempty_outer_keys(
model.input_cache.vector_constraints,
)
for (index, value) in model.input_cache.vector_constraints[F, S]
MOI.set(opt, ForwardConstraintFunction(), index, value)
end
end
MOI.set(opt, ForwardDifferentiate(), nothing)
return
end
diff = _diff(model)
MOI.set(
diff,
@@ -738,7 +786,10 @@ end

function empty_input_sensitivities!(model::Optimizer)
empty!(model.input_cache)
if model.diff !== nothing
solver = _native_diff_solver(model)
if solver !== nothing
empty_input_sensitivities!(solver)
elseif model.diff !== nothing
empty_input_sensitivities!(model.diff)
end
return
@@ -782,6 +833,35 @@ function _instantiate_diff(model::Optimizer, constructor)
return model_bridged
end

function _solver_supports_differentiate(model::Optimizer)
return MOI.supports(model.optimizer, BackwardDifferentiate()) ||
MOI.supports(model.optimizer, ForwardDifferentiate())
end

# Find the native differentiation solver in the optimizer chain.
# Cached in `model.diff` to avoid repeated unwrapping.
function _native_diff_solver(model::Optimizer)
if model.diff === nothing && _solver_supports_differentiate(model)
model.diff = _find_native_solver(model.optimizer)
model.index_map = MOI.Utilities.identity_index_map(model.optimizer)
end
return model.diff
end

_find_native_solver(opt) = opt

function _find_native_solver(opt::MOI.Utilities.CachingOptimizer)
return _find_native_solver(opt.optimizer)
end

function _find_native_solver(opt::MOI.Bridges.LazyBridgeOptimizer)
return _find_native_solver(opt.model)
end

function _find_native_solver(opt::POI.Optimizer)
return _find_native_solver(opt.optimizer)
end
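`_find_native_solver` simply unwraps the `MOI.Utilities.CachingOptimizer`, `MOI.Bridges.LazyBridgeOptimizer`, and `POI.Optimizer` layers until it reaches the underlying solver. A rough sketch of one such chain, with `MySolver` standing in for a hypothetical natively-differentiating optimizer:

```julia
import MathOptInterface as MOI
import ParametricOptInterface as POI

inner = MySolver.Optimizer()  # hypothetical solver with native differentiation
poi = POI.Optimizer(inner)
cached = MOI.Utilities.CachingOptimizer(
    MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()),
    poi,
)
bridged = MOI.Bridges.full_bridge_optimizer(cached, Float64)

# `_find_native_solver(bridged)` follows `bridged.model`, then
# `cached.optimizer`, then `poi.optimizer`, and returns `inner`.
```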

function _diff(model::Optimizer)
if model.diff === nothing
_check_termination_status(model)
@@ -837,6 +917,10 @@ end
# DiffOpt attributes redirected to `diff`

function _checked_diff(model::Optimizer, attr::MOI.AnyAttribute, call)
solver = _native_diff_solver(model)
if solver !== nothing
return solver
end
if model.diff === nothing
error("Cannot get attribute `$attr`. First call `DiffOpt.$call`.")
end
@@ -1125,6 +1209,10 @@ function MOI.set(
end

function MOI.get(model::Optimizer, attr::DifferentiateTimeSec)
solver = _native_diff_solver(model)
if solver !== nothing
return MOI.get(solver, attr)
end
return MOI.get(model.diff, attr)
end
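From the user's side the API is unchanged: the usual forward (and, symmetrically, reverse) calls are simply routed to the solver when it opts in. A sketch under the assumption that a hypothetical `MySolver` supports `DiffOpt.ForwardDifferentiate()`:

```julia
using JuMP
import DiffOpt
import MathOptInterface as MOI

# `MySolver` is a placeholder for any optimizer that answers `true` to
# `MOI.supports(optimizer, DiffOpt.ForwardDifferentiate())`.
model = Model(() -> DiffOpt.diff_optimizer(MySolver.Optimizer))
@variable(model, p in Parameter(4.0))
@variable(model, x)
@constraint(model, con, x >= 3p)
@objective(model, Min, 2x)
optimize!(model)

# Seed dp = 1 and differentiate. With native support the inputs are copied
# into the solver and `ForwardDifferentiate()` is set on it, instead of
# instantiating a DiffOpt differentiation backend.
MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(1.0))
DiffOpt.forward_differentiate!(model)
MOI.get(model, DiffOpt.ForwardVariablePrimal(), x)  # dx/dp, equal to 3 for this model
```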

51 changes: 38 additions & 13 deletions src/parameters.jl
@@ -5,45 +5,70 @@

# block other methods

MOI.supports(::POI.Optimizer, ::ForwardObjectiveFunction) = false
function _poi_inner_supports_native_diff(model::POI.Optimizer)
return MOI.supports(model.optimizer, BackwardDifferentiate()) ||
MOI.supports(model.optimizer, ForwardDifferentiate())
end

function MOI.supports(model::POI.Optimizer, ::ForwardObjectiveFunction)
return _poi_inner_supports_native_diff(model)
end

function MOI.set(::POI.Optimizer, ::ForwardObjectiveFunction, _)
function MOI.set(model::POI.Optimizer, attr::ForwardObjectiveFunction, v)
if _poi_inner_supports_native_diff(model)
return MOI.set(model.optimizer, attr, v)
end
return error(
"Forward objective function is not supported when " *
"`JuMP.Parameter`s (or `MOI.Parameter`s) are present in the model.",
)
end

MOI.supports(::POI.Optimizer, ::ForwardConstraintFunction) = false
function MOI.supports(model::POI.Optimizer, ::ForwardConstraintFunction)
return _poi_inner_supports_native_diff(model)
end

function MOI.set(
::POI.Optimizer,
::ForwardConstraintFunction,
::MOI.ConstraintIndex,
_,
model::POI.Optimizer,
attr::ForwardConstraintFunction,
ci::MOI.ConstraintIndex,
v,
)
if _poi_inner_supports_native_diff(model)
return MOI.set(model.optimizer, attr, ci, v)
end
return error(
"Forward constraint function is not supported when " *
"`JuMP.Parameter`s (or `MOI.Parameter`s) are present in the model.",
)
end

MOI.supports(::POI.Optimizer, ::ReverseObjectiveFunction) = false
function MOI.supports(model::POI.Optimizer, ::ReverseObjectiveFunction)
return _poi_inner_supports_native_diff(model)
end

function MOI.get(::POI.Optimizer, ::ReverseObjectiveFunction)
function MOI.get(model::POI.Optimizer, attr::ReverseObjectiveFunction)
if _poi_inner_supports_native_diff(model)
return MOI.get(model.optimizer, attr)
end
return error(
"Reverse objective function is not supported when " *
"`JuMP.Parameter`s (or `MOI.Parameter`s) are present in the model.",
)
end

MOI.supports(::POI.Optimizer, ::ReverseConstraintFunction) = false
function MOI.supports(model::POI.Optimizer, ::ReverseConstraintFunction)
return _poi_inner_supports_native_diff(model)
end

function MOI.get(
::POI.Optimizer,
::ReverseConstraintFunction,
::MOI.ConstraintIndex,
model::POI.Optimizer,
attr::ReverseConstraintFunction,
ci::MOI.ConstraintIndex,
)
if _poi_inner_supports_native_diff(model)
return MOI.get(model.optimizer, attr, ci)
end
return error(
"Reverse constraint function is not supported when " *
"`JuMP.Parameter`s (or `MOI.Parameter`s) are present in the model.",
    )
end
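The net effect of these changes is that the `POI.Optimizer` methods no longer error unconditionally when parameters are present: they first check whether the inner optimizer natively supports differentiation and, if so, pass the attribute straight through. A small sketch, again with a hypothetical `MySolver`:

```julia
import DiffOpt
import MathOptInterface as MOI
import ParametricOptInterface as POI

# Hypothetical solver that opts into native differentiation.
poi = POI.Optimizer(MySolver.Optimizer())

# Previously this was hard-coded to `false` whenever parameters were in play;
# it now mirrors the inner solver's capability, and the corresponding
# `MOI.set` / `MOI.get` calls are forwarded to `poi.optimizer`.
MOI.supports(poi, DiffOpt.ForwardObjectiveFunction())
```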