Skip to content

Commit

Permalink
start edge backend
Browse files Browse the repository at this point in the history
  • Loading branch information
jalving committed Jan 2, 2024
1 parent cd6d7a0 commit ee59ee1
Show file tree
Hide file tree
Showing 6 changed files with 301 additions and 125 deletions.
3 changes: 1 addition & 2 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@ version = "0.5.3"
[deps]
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
LightGraphs = "093fc24a-ae57-5d10-9952-331d41423f4d"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
Expand All @@ -19,7 +19,6 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[compat]
DataStructures = "0.18"
LightGraphs = "~1.3"
MathOptInterface = "1.6"
Reexport = "~0.2, 1"
Requires = "~1.0, 1"
Expand Down
2 changes: 1 addition & 1 deletion src/Plasmo.jl
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ using Requires
using LinearAlgebra
using DataStructures
using SparseArrays
using LightGraphs
using Graphs
using Printf

using MathOptInterface
Expand Down
198 changes: 143 additions & 55 deletions src/backend.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# const ConstraintRefUnion = Union{JuMP.ConstraintRef,LinkConstraintRef}

# maps node/edge variable and constraints to the optigraph backend
mutable struct NodeToGraphMap
var_map::OrderedDict{NodeVariableRef,MOI.VariableIndex} #node variable to optimizer
Expand Down Expand Up @@ -53,9 +51,9 @@ function Base.getindex(g2n_map::GraphToNodeMap, idx::MOI.ConstraintIndex)
return g2n_map.con_map[idx]
end

#acts like a caching optimizer, except it uses references to underlying nodes in the graph
#NOTE: OptiGraph does not support modes yet. Eventually we will
#try to support Direct, Manual, and Automatic modes on an optigraph.
# acts as an intermediate optimizer, except it uses references to underlying nodes in the graph
# NOTE: OptiGraph does not support modes yet. Eventually we will support more than CachingOptimizer
# try to support Direct, Manual, and Automatic modes on an optigraph.
mutable struct GraphMOIBackend <: MOI.AbstractOptimizer
optigraph::AbstractOptiGraph
# TODO: nlp model
Expand All @@ -69,7 +67,8 @@ end
GraphMOIBackend()
Initialize an empty optigraph backend. Contains a model_cache that can be used to set
`MOI.AbstractModelAttribute`s and `MOI.AbstractOptimizerAttribute`s.
`MOI.AbstractModelAttribute`s and `MOI.AbstractOptimizerAttribute`s. By default we
use a `CachingOptimizer` to store the underlying optimizer.
"""
function GraphMOIBackend(optigraph::AbstractOptiGraph)
inner = MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}())
Expand Down Expand Up @@ -101,44 +100,7 @@ end

### Variables

function next_variable_index(node::OptiNode)
return MOI.VariableIndex(num_variables(node) + 1)
end

function _moi_add_node_variable(
node::OptiNode,
v::JuMP.AbstractVariable
)
# add variable to source graph
variable_index = next_variable_index(node)
vref = NodeVariableRef(node, variable_index)
graph_var_index = _add_variable_to_backend(graph_backend(node), vref)
_moi_constrain_node_variable(graph_backend(node), graph_var_index, v.info, Float64)

# add variable to all other contained graphs
for graph in contained_optigraphs(node)
graph_var_index = _add_variable_to_backend(graph_backend(graph), vref)
_moi_constrain_node_variable(
graph_backend(graph),
graph_var_index,
v.info,
Float64
)
end
return vref
end

function _add_variable_to_backend(
graph_backend::GraphMOIBackend,
vref::NodeVariableRef
)
graph_index = MOI.add_variable(graph_backend.moi_backend)
graph_backend.node_to_graph_map[vref] = graph_index
graph_backend.graph_to_node_map[graph_index] = vref

return graph_index
end

# copied from...
function _moi_constrain_node_variable(
gb::GraphMOIBackend,
index,
Expand Down Expand Up @@ -182,17 +144,45 @@ function _moi_constrain_node_variable(
end
end

### Constraints
"""
    next_variable_index(node::OptiNode)

Return the `MOI.VariableIndex` that the next variable added to `node` will
receive (one past the node's current variable count).
"""
next_variable_index(node::OptiNode) = MOI.VariableIndex(num_variables(node) + 1)

function next_constraint_index(
node::OptiNode,
::Type{F},
::Type{S}
)::MOI.ConstraintIndex{F,S} where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet}
index = num_constraints(node, F, S)
return MOI.ConstraintIndex{F,S}(index + 1)
"""
    _moi_add_node_variable(node::OptiNode, v::JuMP.AbstractVariable)

Create a new variable on `node` and register it, together with the bound and
integrality information in `v.info`, with the backend of every optigraph that
contains the node. Return the resulting `NodeVariableRef`.
"""
function _moi_add_node_variable(node::OptiNode, v::JuMP.AbstractVariable)
    # reference the node's next free variable index
    vref = NodeVariableRef(node, next_variable_index(node))

    # register the variable with each containing optigraph's backend
    for graph in containing_optigraphs(node)
        backend = graph_backend(graph)
        var_idx = _add_variable_to_backend(backend, vref)
        # apply bounds / start / integrality from the variable info
        _moi_constrain_node_variable(backend, var_idx, v.info, Float64)
    end
    return vref
end

"""
    _add_variable_to_backend(graph_backend::GraphMOIBackend, vref::NodeVariableRef)

Add a fresh MOI variable to `graph_backend` and record the two-way mapping
between `vref` and the backend's `MOI.VariableIndex`. Return the new index.
"""
function _add_variable_to_backend(
    graph_backend::GraphMOIBackend,
    vref::NodeVariableRef
)
    var_idx = MOI.add_variable(graph_backend.moi_backend)
    # maintain both directions of the node <-> backend lookup
    graph_backend.node_to_graph_map[vref] = var_idx
    graph_backend.graph_to_node_map[var_idx] = vref
    return var_idx
end

### Node Constraints

# copied from: https://github.com/jump-dev/JuMP.jl/blob/0df25a9185ceede762af533bc965c9374c97450c/src/constraints.jl#L673
function _moi_add_constraint(
model::MOI.ModelLike,
Expand All @@ -212,6 +202,15 @@ function _moi_add_constraint(
return MOI.add_constraint(model, f, s)
end

"""
    next_constraint_index(node::OptiNode, ::Type{F}, ::Type{S})

Return the `MOI.ConstraintIndex{F,S}` that the next `F`-in-`S` constraint on
`node` will receive (one past the node's current count of such constraints).
"""
function next_constraint_index(
    node::OptiNode,
    ::Type{F},
    ::Type{S}
)::MOI.ConstraintIndex{F,S} where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet}
    return MOI.ConstraintIndex{F,S}(num_constraints(node, F, S) + 1)
end

function _moi_add_node_constraint(
node::OptiNode,
con::JuMP.AbstractConstraint
Expand All @@ -223,23 +222,112 @@ function _moi_add_node_constraint(
typeof(set)
)::MOI.ConstraintIndex{typeof(func),typeof(set)}
cref = ConstraintRef(node, constraint_index, JuMP.shape(con))
_add_constraint_to_backend(graph_backend(node), cref, func, set)
for graph in containing_optigraphs(node)
_add_node_constraint_to_backend(graph_backend(graph), cref, func, set)
end
return cref
end

function _add_constraint_to_backend(
"""
    _add_node_constraint_to_backend(graph_backend::GraphMOIBackend, cref, func, set)

Add the node constraint `func`-in-`set` to `graph_backend` and record the
two-way mapping between `cref` and the backend's constraint index. Return the
backend index.
"""
function _add_node_constraint_to_backend(
    graph_backend::GraphMOIBackend,
    cref::ConstraintRef,
    func::F,
    set::S
) where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet}
    con_idx = MOI.add_constraint(graph_backend.moi_backend, func, set)
    # maintain both directions of the node <-> backend constraint lookup
    graph_backend.node_to_graph_map[cref] = con_idx
    graph_backend.graph_to_node_map[con_idx] = cref
    return con_idx
end

### Edge Constraints

"""
    next_constraint_index(edge::OptiEdge, ::Type{F}, ::Type{S})

Return the `MOI.ConstraintIndex{F,S}` that the next `F`-in-`S` constraint on
`edge` will receive (one past the edge's current count of such constraints).
"""
function next_constraint_index(
    edge::OptiEdge,
    ::Type{F},
    ::Type{S}
)::MOI.ConstraintIndex{F,S} where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet}
    return MOI.ConstraintIndex{F,S}(num_constraints(edge, F, S) + 1)
end

# #Add a LinkConstraint to the MOI backend. This is used as part of _aggregate_backends!
# function _add_link_constraint!(id::Symbol, dest::MOI.ModelLike, link::LinkConstraint)
# jump_func = JuMP.jump_function(link)
# moi_func = JuMP.moi_function(link)
# for (i, term) in enumerate(JuMP.linear_terms(jump_func))
# coeff = term[1]
# var = term[2]

# src = JuMP.backend(optinode(var))
# idx_map = src.optimizers[id].node_to_optimizer_map

# var_idx = JuMP.index(var)
# dest_idx = idx_map[var_idx]

# moi_func.terms[i] = MOI.ScalarAffineTerm{Float64}(coeff, dest_idx)
# end
# moi_set = JuMP.moi_set(link)
# constraint_index = MOI.add_constraint(dest, moi_func, moi_set)
# return constraint_index
# end

"""
    _has_var_idx(graph_backend::GraphMOIBackend, var::NodeVariableRef)

Whether `var` already has a backend index registered in `graph_backend`'s
variable map.
"""
function _has_var_idx(graph_backend::GraphMOIBackend, var::NodeVariableRef)
    return var in keys(graph_backend.node_to_graph_map.var_map)
end

"""
    _set_var_idx(graph_backend::GraphMOIBackend, var::NodeVariableRef)

Assign `var` the next sequential `MOI.VariableIndex` in `graph_backend`'s
node-to-graph variable map.

NOTE(review): this only records the forward mapping — it does not call
`MOI.add_variable` on the backend and does not update `graph_to_node_map`;
confirm the caller is responsible for both.
"""
function _set_var_idx(graph_backend::GraphMOIBackend, var::NodeVariableRef)
    var_count = MOI.get(graph_backend.moi_backend, MOI.NumberOfVariables())
    graph_backend.node_to_graph_map[var] = MOI.VariableIndex(var_count + 1)
    return nothing
end

"""
    _moi_add_edge_constraint(edge::OptiEdge, con::JuMP.AbstractConstraint)

Add the linking constraint `con` to `edge`, registering it with the backend of
every optigraph that contains the edge. The variable indices inside the MOI
function are remapped to each backend's own indices before the constraint is
added. Return the resulting `ConstraintRef`.
"""
function _moi_add_edge_constraint(
    edge::OptiEdge,
    con::JuMP.AbstractConstraint
)
    # get moi function and set
    jump_func = JuMP.jump_function(con)
    moi_func = JuMP.moi_function(con)
    moi_set = JuMP.moi_set(con)

    # create constraint index and reference
    constraint_index = next_constraint_index(
        edge,
        typeof(moi_func),
        typeof(moi_set)
    )::MOI.ConstraintIndex{typeof(moi_func),typeof(moi_set)}
    cref = ConstraintRef(edge, constraint_index, JuMP.shape(con))

    # TODO: figure out edges between subgraphs
    for graph in containing_optigraphs(edge)
        # CONSISTENCY FIX: use `graph_backend(graph)` throughout (the original
        # mixed `backend(graph)`, `graph.backend`, and `graph_backend(graph)`);
        # also hoisted out of the inner loop since it is loop-invariant.
        gb = graph_backend(graph)
        # rewrite moi_func terms to this backend's variable indices
        for (i, term) in enumerate(JuMP.linear_terms(jump_func))
            coeff = term[1]
            var = term[2]
            # lazily register variables this backend has not seen yet
            if !(_has_var_idx(gb, var))
                _set_var_idx(gb, var)
            end
            backend_var_idx = gb.node_to_graph_map[var]
            moi_func.terms[i] = MOI.ScalarAffineTerm{Float64}(coeff, backend_var_idx)
        end
        _add_edge_constraint_to_backend(gb, cref, moi_func, moi_set)
    end
    return cref
end

"""
    _add_edge_constraint_to_backend(graph_backend::GraphMOIBackend, cref, func, set)

Add the edge constraint `func`-in-`set` to `graph_backend` and record the
two-way mapping between `cref` and the backend's constraint index. Return the
backend index.
"""
function _add_edge_constraint_to_backend(
    graph_backend::GraphMOIBackend,
    cref::ConstraintRef,
    func::F,
    set::S
) where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet}
    graph_con_index = MOI.add_constraint(graph_backend.moi_backend, func, set)
    graph_backend.edge_to_graph_map[cref] = graph_con_index
    graph_backend.graph_to_edge_map[graph_con_index] = cref
    # BUG FIX: previously returned the undefined name `graph_index`,
    # which would throw an UndefVarError at runtime.
    return graph_con_index
end

### TODO

function MOI.optimize!(graph_backend::GraphMOIBackend)
Expand Down
10 changes: 9 additions & 1 deletion src/dev.jl
Original file line number Diff line number Diff line change
@@ -1,16 +1,24 @@
using Plasmo
graph = OptiGraph()

n1 = Plasmo.add_node(graph)
@variable(n1, x >= 0)
@variable(n1, y >= 0)
@constraint(n1, ref1, x+y==2)

print(n1[:x])

n2 = Plasmo.add_node(graph)
@variable(n2, x >= 1)
@variable(n2, y >= 2)
@constraint(n2, ref2, x+y==4)

# edge1 = Plasmo.add_edge(graph, n1, n2)
# @constraint(edge1)

@linkconstraint(graph, n1[:x] + n2[:x] == 2)



# m = Model()
# @variable(m, x[1:100000])
# Base.summarysize(m) # 11053087
Expand Down
48 changes: 26 additions & 22 deletions src/notes.txt
Original file line number Diff line number Diff line change
@@ -1,37 +1,41 @@
# Development Notes:

## Create our own MOI interface for the OptiGraph
# Motivation
- The idea for the new Plasmo.jl is to create our own MOI interface for the OptiGraph

## Why we can't use JuMP.Model for an OptiGraph backend
- No way to handle linking between subproblems (variables must be owned by their model)
- Less prone to user 'messing things up' by working directly with 'Model'
- More flexibility creating our own MOI wrapper: We could use MOI or GOI backends

## Use MOI.copy_to to create final optimization problem if necessary
- e.g. we need to transfer modular subgraphs into aggregated optigraph to solve

## Nodes are associated with variables and constraints through GraphBackend
- Nodes are 'light-weight' memory objects
- More flexibility creating our own MOI wrapper: We could use either MOI or GOI backends

## Edges point to a constraint index on an owning optigraph
# Development Notes:

## Creating the optigraph backend
- Adding variables to nodes and edges updates the corresponding backend(s)
- We create a final optigraph backend using contained subgraphs if needed
- We can optionally choose to build an optigraph using a single backend
- `add_subgraph!(graph; modular=false)` creates a graph that points to the parent backend
- `add_subgraph!(graph, sg)` uses the subgraph backend. Same as `add_subgraph!(graph, modular=true)`.
- links between subgraphs use referenced variables if all one backend, otherwise creates new references

## adding a subgraph can (optionally) directly update the parent graph
- `add_subgraph!(graph; modular=false)`
## Nodes and edges are light-weight in memory.
- They are associated with variables and constraints through the GraphBackend

## Creating JuMP models
- It should be possible to obtain a JuMP Model from an optigraph. this performs necessary copy functions
- `model = jump_model(graph)`

## It should be possible to obtain a JuMP Model from an optigraph
- `jump_model(graph)`

# Other Notes:
## Multiple dictionaries are not memory efficient
- creating a new dictionary for each node does not scale. We need to keep the amount of node data to an absolute minimum and leverage aggregate data structures where possible
- need to PR JuMP with something like a 'register' call in the macro


# OptiGraph Backends

## OptiGraph Backends
- MOI Backend
- GOI Backend
- MOI Backend (standard solvers)
- GOI Backend (graph-based solvers)

## Distributed OptiGraph Backend
- MOI Backends
- GOI Backends
- DGOI Backend
## Distributed OptiGraph Backends
- MOI Backends (coordinate standard solvers)
- GOI Backends (coordinate graph-based solvers)
- DGOI Backend (hookup a distributed solver)
Loading

0 comments on commit ee59ee1

Please sign in to comment.