Fix various JET errors #3519

Merged 2 commits on Sep 21, 2023

src/copy.jl (3 additions, 2 deletions)
@@ -323,11 +323,12 @@ function Base.deepcopy(::GenericModel)
end

function MOI.copy_to(dest::MOI.ModelLike, src::GenericModel)
if nonlinear_model(src) !== nothing
nlp = nonlinear_model(src)
if nlp !== nothing
# Re-set the NLP block in-case things have changed since last
# solve.
evaluator = MOI.Nonlinear.Evaluator(
nonlinear_model(src),
nlp,
MOI.Nonlinear.SparseReverseMode(),
index.(all_variables(src)),
)

src/nlp.jl (31 additions, 27 deletions)
@@ -162,9 +162,9 @@ julia> set_nonlinear_objective(model, MIN_SENSE, :(\$(x) + \$(x)^2))
```
"""
function set_nonlinear_objective(model::Model, sense::MOI.OptimizationSense, x)
_init_NLP(model)
set_objective_sense(model, sense)
MOI.Nonlinear.set_objective(model.nlp_model, x)
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
MOI.Nonlinear.set_objective(nlp, x)
return
end

@@ -178,7 +178,7 @@ function _nlp_objective_function(model::GenericModel)
if model.nlp_model === nothing
return nothing
end
return model.nlp_model.objective
return something(model.nlp_model).objective
Member: It's surprising the compiler isn't able to figure this one out.

Member (author): Part of this is because model.nlp_model calls getproperty, which may do arbitrary things.

This is another case where inference for a specific concrete call with a particular GenericModel{T} might be able to figure this out, but we can't figure it out statically based on (::GenericModel).
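
Below is a minimal, self-contained sketch of the inference problem discussed in this thread, and of the `something` workaround used in the diff above. `ToyModel` and `ToyNLPModel` are hypothetical stand-ins, not the actual JuMP or MOI types.

```julia
# Hypothetical stand-ins (not the real JuMP/MOI definitions).
struct ToyNLPModel
    objective::Any
end

mutable struct ToyModel
    nlp_model::Union{Nothing,ToyNLPModel}
end

function nlp_objective(m::ToyModel)
    if m.nlp_model === nothing
        return nothing
    end
    # JET still sees `Union{Nothing,ToyNLPModel}` here: the field is re-read
    # via getproperty, and the earlier check does not narrow a second read of
    # a mutable field, so `getproperty(::Nothing, :objective)` is reported as
    # a possible error.
    return m.nlp_model.objective
end

function nlp_objective_narrowed(m::ToyModel)
    if m.nlp_model === nothing
        return nothing
    end
    # `something` throws on `nothing` and returns its argument otherwise, so
    # the result is inferred as `ToyNLPModel` and the access below is clean.
    return something(m.nlp_model).objective
end
```

With JET loaded, `JET.@report_call nlp_objective(ToyModel(nothing))` should flag the possible `getproperty(::Nothing, :objective)` access, while the narrowed version should come back clean.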

end

###
@@ -213,8 +213,8 @@ end
Add an anonymous parameter to the model.
"""
function add_nonlinear_parameter(model::Model, value::Real)
_init_NLP(model)
p = MOI.Nonlinear.add_parameter(model.nlp_model, Float64(value))
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
p = MOI.Nonlinear.add_parameter(nlp, Float64(value))
return NonlinearParameter(model, p.value)
end

@@ -243,7 +243,8 @@ julia> value(p)
```
"""
function value(p::NonlinearParameter)
return p.model.nlp_model[MOI.Nonlinear.ParameterIndex(p.index)]
nlp = nonlinear_model(p.model; force = true)::MOI.Nonlinear.Model
return nlp[MOI.Nonlinear.ParameterIndex(p.index)]
end

"""
@@ -267,7 +268,8 @@ julia> value(p)
```
"""
function set_value(p::NonlinearParameter, value::Number)
p.model.nlp_model[MOI.Nonlinear.ParameterIndex(p.index)] = value
nlp = nonlinear_model(p.model; force = true)::MOI.Nonlinear.Model
nlp[MOI.Nonlinear.ParameterIndex(p.index)] = value
return value
end

@@ -328,8 +330,8 @@ subexpression[1]: x + x ^ 2.0
```
"""
function add_nonlinear_expression(model::Model, ex)
_init_NLP(model)
index = MOI.Nonlinear.add_expression(model.nlp_model, ex)
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
index = MOI.Nonlinear.add_expression(nlp, ex)
return NonlinearExpression(model, index.value)
end

@@ -379,9 +381,10 @@ end
Evaluate `ex` using `var_value(v)` as the value for each variable `v`.
"""
function value(var_value::Function, ex::NonlinearExpression)
nlp = nonlinear_model(ex.model; force = true)::MOI.Nonlinear.Model
return MOI.Nonlinear.evaluate(
_VariableValueMap(ex.model, var_value),
ex.model.nlp_model,
nlp,
MOI.Nonlinear.ExpressionIndex(ex.index),
)
end
@@ -461,9 +464,9 @@ julia> add_nonlinear_constraint(model, :(\$(x) + \$(x)^2 <= 1))
```
"""
function add_nonlinear_constraint(model::Model, ex::Expr)
_init_NLP(model)
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
f, set = _expr_to_constraint(ex)
c = MOI.Nonlinear.add_constraint(model.nlp_model, f, set)
c = MOI.Nonlinear.add_constraint(nlp, f, set)
return ConstraintRef(model, c, ScalarShape())
end

@@ -476,9 +479,9 @@ function is_valid(model::Model, c::NonlinearConstraintRef)
if model !== c.model
return false
end
_init_NLP(model)
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
index = MOI.Nonlinear.ConstraintIndex(c.index.value)
return MOI.is_valid(model.nlp_model, index)
return MOI.is_valid(nlp, index)
end

"""
@@ -487,9 +490,9 @@ end
Delete the nonlinear constraint `c` from `model`.
"""
function delete(model::Model, c::NonlinearConstraintRef)
_init_NLP(model)
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
index = MOI.Nonlinear.ConstraintIndex(c.index.value)
MOI.Nonlinear.delete(model.nlp_model, index)
MOI.Nonlinear.delete(nlp, index)
return
end

@@ -544,10 +547,11 @@ Evaluate `c` using `var_value(v)` as the value for each variable `v`.
"""
function value(var_value::Function, c::NonlinearConstraintRef)
index = MOI.Nonlinear.ConstraintIndex(c.index.value)
nlp = nonlinear_model(c.model; force = true)::MOI.Nonlinear.Model
return MOI.Nonlinear.evaluate(
_VariableValueMap(c.model, var_value),
c.model.nlp_model,
c.model.nlp_model[index].expression,
nlp,
nlp[index].expression,
)
end

@@ -712,8 +716,8 @@ function register(
if autodiff == false
error("If only the function is provided, must set autodiff=true")
end
_init_NLP(model)
MOI.Nonlinear.register_operator(model.nlp_model, op, dimension, f)
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
MOI.Nonlinear.register_operator(nlp, op, dimension, f)
return
end

@@ -796,19 +800,19 @@ function register(
∇f::Function;
autodiff::Bool = false,
)
_init_NLP(model)
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
if dimension == 1
if autodiff == false
error(
"Currently must provide 2nd order derivatives of univariate functions. Try setting autodiff=true.",
)
end
MOI.Nonlinear.register_operator(model.nlp_model, op, dimension, f, ∇f)
MOI.Nonlinear.register_operator(nlp, op, dimension, f, ∇f)
else
if autodiff == true
@warn("autodiff = true ignored since gradient is already provided.")
end
MOI.Nonlinear.register_operator(model.nlp_model, op, dimension, f, ∇f)
MOI.Nonlinear.register_operator(nlp, op, dimension, f, ∇f)
end
return
end
@@ -869,8 +873,8 @@ function register(
∇f::Function,
∇²f::Function,
)
_init_NLP(model)
MOI.Nonlinear.register_operator(model.nlp_model, op, dimension, f, ∇f, ∇²f)
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
MOI.Nonlinear.register_operator(nlp, op, dimension, f, ∇f, ∇²f)
return
end

@@ -898,9 +902,9 @@ function NLPEvaluator(
model::Model;
_differentiation_backend::MOI.Nonlinear.AbstractAutomaticDifferentiation = MOI.Nonlinear.SparseReverseMode(),
)
_init_NLP(model)
nlp = nonlinear_model(model; force = true)::MOI.Nonlinear.Model
return MOI.Nonlinear.Evaluator(
model.nlp_model,
nlp,
_differentiation_backend,
index.(all_variables(model)),
)

src/nlp_expr.jl (2 additions, 2 deletions)
@@ -577,14 +577,14 @@ end
moi_function_type(::Type{<:GenericNonlinearExpr}) = MOI.ScalarNonlinearFunction

function constraint_object(c::NonlinearConstraintRef)
nlp = nonlinear_model(c.model)
nlp = nonlinear_model(c.model)::MOI.Nonlinear.Model
data = nlp.constraints[index(c)]
return ScalarConstraint(jump_function(c.model, data.expression), data.set)
end

function jump_function(model::GenericModel, expr::MOI.Nonlinear.Expression)
V = variable_ref_type(typeof(model))
nlp = nonlinear_model(model)
nlp = nonlinear_model(model)::MOI.Nonlinear.Model
parsed = Vector{Any}(undef, length(expr.nodes))
adj = MOI.Nonlinear.adjacency_matrix(expr.nodes)
rowvals = SparseArrays.rowvals(adj)

src/objective.jl (3 additions, 2 deletions)
@@ -118,8 +118,9 @@ function set_objective_function(model::GenericModel, func::MOI.AbstractFunction)
MOI.set(model, attr, func)
# Nonlinear objectives override regular objectives, so if there was a
# nonlinear objective set, we must clear it.
if nonlinear_model(model) !== nothing
MOI.Nonlinear.set_objective(nonlinear_model(model), nothing)
nlp = nonlinear_model(model)
if nlp !== nothing
MOI.Nonlinear.set_objective(nlp, nothing)
end
return
end

src/optimizer_interface.jl (3 additions, 2 deletions)
@@ -414,7 +414,8 @@ function optimize!(
)
# The nlp_model is not kept in sync, so re-set it here.
# TODO: Consider how to handle incremental solves.
if nonlinear_model(model) !== nothing
nlp = nonlinear_model(model)
if nlp !== nothing
if _uses_new_nonlinear_interface(model)
error(
"Cannot optimize a model which contains the features from " *
@@ -424,7 +425,7 @@
)
end
evaluator = MOI.Nonlinear.Evaluator(
nonlinear_model(model),
nlp,
_differentiation_backend,
index.(all_variables(model)),
)

src/print.jl (8 additions, 5 deletions)
@@ -399,7 +399,8 @@ function nonlinear_constraint_string(
mode::MIME,
c::MOI.Nonlinear.ConstraintIndex,
)
constraint = nonlinear_model(model)[c]
nlp = nonlinear_model(model)::MOI.Nonlinear.Model
constraint = nlp[c]
body = nonlinear_expr_string(model, mode, constraint.expression)
lhs = _set_lhs(constraint.set)
rhs = _set_rhs(constraint.set)
@@ -442,7 +443,8 @@ function nonlinear_expr_string(
mode::MIME,
c::MOI.Nonlinear.Expression,
)
expr = MOI.Nonlinear.convert_to_expr(nonlinear_model(model), c)
nlp = nonlinear_model(model)::MOI.Nonlinear.Model
expr = MOI.Nonlinear.convert_to_expr(nlp, c)
# Walk terms, and replace
# MOI.VariableIndex => VariableRef
# MOI.Nonlinear.ExpressionIndex => _NonlinearExpressionIO
@@ -588,7 +590,7 @@ function function_string(mode::MIME"text/latex", v::AbstractVariableRef)
# Convert any x[args] to x_{args} so that indices on x print as subscripts.
m = match(r"^(.*)\[(.+)\]$", var_name)
if m !== nothing
var_name = m[1] * "_{" * m[2] * "}"
return string(m[1]::AbstractString, "_{", m[2]::AbstractString, "}")
end
return var_name
end
@@ -655,7 +657,7 @@ function function_string(mode, q::GenericQuadExpr)
end

function function_string(mode, vector::Vector{<:AbstractJuMPScalar})
return "[" * join(function_string.(Ref(mode), vector), ", ") * "]"
return string("[", join(function_string.(Ref(mode), vector), ", "), "]")
end

function function_string(
@@ -702,7 +704,8 @@ function function_string(mode, constraint::AbstractConstraint)
end

function function_string(mode::MIME, p::NonlinearExpression)
expr = nonlinear_model(p.model)[index(p)]
nlp = nonlinear_model(p.model)::MOI.Nonlinear.Model
expr = nlp[index(p)]
s = nonlinear_expr_string(p.model, mode, expr)
return "subexpression[$(p.index)]: " * s
end