diff --git a/Project.toml b/Project.toml index c6642c16..5e1ebed8 100644 --- a/Project.toml +++ b/Project.toml @@ -7,8 +7,8 @@ version = "0.5.3" [deps] DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" +Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" JuMP = "4076af6c-e467-56ae-b986-b466b2749572" -LightGraphs = "093fc24a-ae57-5d10-9952-331d41423f4d" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" @@ -19,7 +19,6 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [compat] DataStructures = "0.18" -LightGraphs = "~1.3" MathOptInterface = "1.6" Reexport = "~0.2, 1" Requires = "~1.0, 1" diff --git a/src/Plasmo.jl b/src/Plasmo.jl index 8f8205a9..0aa2ec6d 100644 --- a/src/Plasmo.jl +++ b/src/Plasmo.jl @@ -4,7 +4,7 @@ using Requires using LinearAlgebra using DataStructures using SparseArrays -using LightGraphs +using Graphs using Printf using MathOptInterface diff --git a/src/backend.jl b/src/backend.jl index a9e8f38e..5a8a95ee 100644 --- a/src/backend.jl +++ b/src/backend.jl @@ -1,5 +1,3 @@ -# const ConstraintRefUnion = Union{JuMP.ConstraintRef,LinkConstraintRef} - # maps node/edge variable and constraints to the optigraph backend mutable struct NodeToGraphMap var_map::OrderedDict{NodeVariableRef,MOI.VariableIndex} #node variable to optimizer @@ -53,9 +51,9 @@ function Base.getindex(g2n_map::GraphToNodeMap, idx::MOI.ConstraintIndex) return g2n_map.con_map[idx] end -#acts like a caching optimizer, except it uses references to underlying nodes in the graph -#NOTE: OptiGraph does not support modes yet. Eventually we will -#try to support Direct, Manual, and Automatic modes on an optigraph. +# acts as an intermediate optimizer, except it uses references to underlying nodes in the graph +# NOTE: OptiGraph does not support modes yet. 
Eventually we will support more than CachingOptimizer +# try to support Direct, Manual, and Automatic modes on an optigraph. mutable struct GraphMOIBackend <: MOI.AbstractOptimizer optigraph::AbstractOptiGraph # TODO: nlp model @@ -69,7 +67,8 @@ end GraphMOIBackend() Initialize an empty optigraph backend. Contains a model_cache that can be used to set -`MOI.AbstractModelAttribute`s and `MOI.AbstractOptimizerAttribute`s. +`MOI.AbstractModelAttribute`s and `MOI.AbstractOptimizerAttribute`s. By default we +use a `CachingOptimizer` to store the underlying optimizer. """ function GraphMOIBackend(optigraph::AbstractOptiGraph) inner = MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()) @@ -101,44 +100,7 @@ end ### Variables -function next_variable_index(node::OptiNode) - return MOI.VariableIndex(num_variables(node) + 1) -end - -function _moi_add_node_variable( - node::OptiNode, - v::JuMP.AbstractVariable -) - # add variable to source graph - variable_index = next_variable_index(node) - vref = NodeVariableRef(node, variable_index) - graph_var_index = _add_variable_to_backend(graph_backend(node), vref) - _moi_constrain_node_variable(graph_backend(node), graph_var_index, v.info, Float64) - - # add variable to all other contained graphs - for graph in contained_optigraphs(node) - graph_var_index = _add_variable_to_backend(graph_backend(graph), vref) - _moi_constrain_node_variable( - graph_backend(graph), - graph_var_index, - v.info, - Float64 - ) - end - return vref -end - -function _add_variable_to_backend( - graph_backend::GraphMOIBackend, - vref::NodeVariableRef -) - graph_index = MOI.add_variable(graph_backend.moi_backend) - graph_backend.node_to_graph_map[vref] = graph_index - graph_backend.graph_to_node_map[graph_index] = vref - - return graph_index -end - +# copied from... 
function _moi_constrain_node_variable( gb::GraphMOIBackend, index, @@ -182,17 +144,45 @@ function _moi_constrain_node_variable( end end -### Constraints +function next_variable_index(node::OptiNode) + return MOI.VariableIndex(num_variables(node) + 1) +end -function next_constraint_index( - node::OptiNode, - ::Type{F}, - ::Type{S} -)::MOI.ConstraintIndex{F,S} where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} - index = num_constraints(node, F, S) - return MOI.ConstraintIndex{F,S}(index + 1) +function _moi_add_node_variable( + node::OptiNode, + v::JuMP.AbstractVariable +) + # add variable to source graph + variable_index = next_variable_index(node) + vref = NodeVariableRef(node, variable_index) + # graph_var_index = _add_variable_to_backend(graph_backend(node), vref) + # _moi_constrain_node_variable(graph_backend(node), graph_var_index, v.info, Float64) + + # add variable to all other containing optigraphs + for graph in containing_optigraphs(node) + graph_var_index = _add_variable_to_backend(graph_backend(graph), vref) + _moi_constrain_node_variable( + graph_backend(graph), + graph_var_index, + v.info, + Float64 + ) + end + return vref +end + +function _add_variable_to_backend( + graph_backend::GraphMOIBackend, + vref::NodeVariableRef +) + graph_index = MOI.add_variable(graph_backend.moi_backend) + graph_backend.node_to_graph_map[vref] = graph_index + graph_backend.graph_to_node_map[graph_index] = vref + return graph_index end +### Node Constraints + # copied from: https://github.com/jump-dev/JuMP.jl/blob/0df25a9185ceede762af533bc965c9374c97450c/src/constraints.jl#L673 function _moi_add_constraint( model::MOI.ModelLike, @@ -212,6 +202,15 @@ function _moi_add_constraint( return MOI.add_constraint(model, f, s) end +function next_constraint_index( + node::OptiNode, + ::Type{F}, + ::Type{S} +)::MOI.ConstraintIndex{F,S} where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} + index = num_constraints(node, F, S) + return MOI.ConstraintIndex{F,S}(index + 1) +end + function 
_moi_add_node_constraint( node::OptiNode, con::JuMP.AbstractConstraint @@ -223,23 +222,112 @@ function _moi_add_node_constraint( typeof(set) )::MOI.ConstraintIndex{typeof(func),typeof(set)} cref = ConstraintRef(node, constraint_index, JuMP.shape(con)) - _add_constraint_to_backend(graph_backend(node), cref, func, set) + for graph in containing_optigraphs(node) + _add_node_constraint_to_backend(graph_backend(graph), cref, func, set) + end return cref end -function _add_constraint_to_backend( +function _add_node_constraint_to_backend( graph_backend::GraphMOIBackend, cref::ConstraintRef, func::F, set::S ) where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} graph_index = MOI.add_constraint(graph_backend.moi_backend, func, set) graph_backend.node_to_graph_map[cref] = graph_index graph_backend.graph_to_node_map[graph_index] = cref return graph_index end +### Edge Constraints + +function next_constraint_index( + edge::OptiEdge, + ::Type{F}, + ::Type{S} +)::MOI.ConstraintIndex{F,S} where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} + index = num_constraints(edge, F, S) + return MOI.ConstraintIndex{F,S}(index + 1) +end + +# #Add a LinkConstraint to the MOI backend. This is used as part of _aggregate_backends! 
+# function _add_link_constraint!(id::Symbol, dest::MOI.ModelLike, link::LinkConstraint) +# jump_func = JuMP.jump_function(link) +# moi_func = JuMP.moi_function(link) +# for (i, term) in enumerate(JuMP.linear_terms(jump_func)) +# coeff = term[1] +# var = term[2] + +# src = JuMP.backend(optinode(var)) +# idx_map = src.optimizers[id].node_to_optimizer_map + +# var_idx = JuMP.index(var) +# dest_idx = idx_map[var_idx] + +# moi_func.terms[i] = MOI.ScalarAffineTerm{Float64}(coeff, dest_idx) +# end +# moi_set = JuMP.moi_set(link) +# constraint_index = MOI.add_constraint(dest, moi_func, moi_set) +# return constraint_index +# end + +function _has_var_idx(graph_backend::GraphMOIBackend, var::NodeVariableRef) + return haskey(graph_backend.node_to_graph_map.var_map, var) +end + +function _set_var_idx(graph_backend::GraphMOIBackend, var::NodeVariableRef) + n_vars = MOI.get(graph_backend.moi_backend, MOI.NumberOfVariables()) + graph_backend.node_to_graph_map[var] = MOI.VariableIndex(n_vars + 1) + return +end + +function _moi_add_edge_constraint( + edge::OptiEdge, + con::JuMP.AbstractConstraint +) + # get moi function and set + jump_func = JuMP.jump_function(con) + moi_func = JuMP.moi_function(con) + moi_set = JuMP.moi_set(con) + + # create constraint index and reference + constraint_index = next_constraint_index( + edge, + typeof(moi_func), + typeof(moi_set) + )::MOI.ConstraintIndex{typeof(moi_func),typeof(moi_set)} + cref = ConstraintRef(edge, constraint_index, JuMP.shape(con)) + + # TODO: figure out edges between subgraphs + for graph in containing_optigraphs(edge) + # update moi_func with actual indices + for (i, term) in enumerate(JuMP.linear_terms(jump_func)) + coeff = term[1] + var = term[2] + if !(_has_var_idx(backend(graph), var)) + _set_var_idx(backend(graph), var) + end + backend_var_idx = graph.backend.node_to_graph_map[var] + moi_func.terms[i] = MOI.ScalarAffineTerm{Float64}(coeff, backend_var_idx) + end + _add_edge_constraint_to_backend(graph_backend(graph), cref, 
moi_func, moi_set) + end + return cref +end + +function _add_edge_constraint_to_backend( + graph_backend::GraphMOIBackend, + cref::ConstraintRef, + func::F, + set::S +) where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} + graph_con_index = MOI.add_constraint(graph_backend.moi_backend, func, set) + graph_backend.edge_to_graph_map[cref] = graph_con_index + graph_backend.graph_to_edge_map[graph_con_index] = cref + return graph_index +end + ### TODO function MOI.optimize!(graph_backend::GraphMOIBackend) diff --git a/src/dev.jl b/src/dev.jl index b755e7b6..7a0056d6 100644 --- a/src/dev.jl +++ b/src/dev.jl @@ -1,16 +1,24 @@ using Plasmo graph = OptiGraph() + n1 = Plasmo.add_node(graph) @variable(n1, x >= 0) @variable(n1, y >= 0) @constraint(n1, ref1, x+y==2) - +print(n1[:x]) n2 = Plasmo.add_node(graph) @variable(n2, x >= 1) @variable(n2, y >= 2) @constraint(n2, ref2, x+y==4) +# edge1 = Plasmo.add_edge(graph, n1, n2) +# @constraint(edge1) + +@linkconstraint(graph, n1[:x] + n2[:x] == 2) + + + # m = Model() # @variable(m, x[1:100000]) # Base.summarysize(m) # 11053087 diff --git a/src/notes.txt b/src/notes.txt index eef36135..99baafbf 100644 --- a/src/notes.txt +++ b/src/notes.txt @@ -1,37 +1,41 @@ -# Development Notes: - -## Create our own MOI interface for the OptiGraph +# Motivation +- The idea for the new Plasmo.jl is to create our own MOI interface for the OptiGraph ## Why we can't use JuMP.Model for an OptiGraph backend - No way to handle linking between subproblems (variables must be owned by their model) - Less prone to user 'messing things up' by working directly with 'Model' -- More flexibility creating our own MOI wrapper: We could use MOI or GOI backends - -## Use MOI.copy_to to create final optimization problem if necessary -- e.g. 
we need to transfer modular subgraphs into aggregated optigraph to solve - -## Nodes are associated with variables and constraints through GraphBackend -- Nodes are 'light-weight' memory objects +- More flexibility creating our own MOI wrapper: We could use either MOI or GOI backends -## Edges point to a constraint index on an owning optigraph +# Development Notes: +## Creating the optigraph backend +- Adding variables to nodes and edges updates the corresponds backend(s) +- We create a final optigraph backend using contained subgraphs if needed +- We can optionally choose to build an optigraph using a single backend + - `add_subgraph!(graph; modular=False)` creates a graph that points to the parent backend + - `add_subgraph!(graph, sg)` uses the subgraph backend. same as `add_subgraph!(graph, modular=True)`. + - links between subgraphs use referenced variables if all one backend, otherwise creates new references -## adding a subgraph can (optionally) directly update the parent graph -- `add_subgraph!(graph; modular=false)` +## Nodes and edges are light-weight in memory. +- They are associated with variables and constraints through the GraphBackend +## Creating JuMP models +- It should be possible to obtain a JuMP Model from an optigraph. this performs necessary copy functions + - `model = jump_model(graph)` -## It should be possible to obtain a JuMP Model from an optigraph -- `jump_model(graph)` +# Other Notes: ## Multiple dictionaries are not memory efficient - creating a new dictionary for each node does not scale. 
We need to keep the amount of node data to an absolute minimum and levarage aggregate data structures where possible -- need to PR JuMP with something like a 'register' call in the macro + + +# OptiGraph Backends ## OptiGraph Backends -- MOI Backend -- GOI Backend +- MOI Backend (standard solvers) +- GOI Backend (graph-based solvers) -## Distributed OptiGraph Backend -- MOI Backends -- GOI Backends -- DGOI Backend \ No newline at end of file +## Distributed OptiGraph Backends +- MOI Backends (coordinate standard solvers) +- GOI Backends (coordinate graph-based solvers) +- DGOI Backend (hookup a distributed solver) diff --git a/src/optigraph.jl b/src/optigraph.jl index a7595b49..2c685c3a 100644 --- a/src/optigraph.jl +++ b/src/optigraph.jl @@ -2,6 +2,7 @@ abstract type AbstractOptiGraph <: JuMP.AbstractModel end ### OptiNode +# TODO: node index? struct OptiNode{GT<:AbstractOptiGraph} <: JuMP.AbstractModel source_graph::GT label::String @@ -12,14 +13,24 @@ function Base.string(node::OptiNode) end Base.print(io::IO, node::OptiNode) = Base.print(io, Base.string(node)) Base.show(io::IO, node::OptiNode) = Base.print(io, node) + function JuMP.object_dictionary(node::OptiNode) return node.source_graph.node_obj_dict end -function Base.setindex!(node::OptiNode, value::Any, t::Tuple{Plasmo.OptiNode, Symbol}) +function Base.setindex!(node::OptiNode, value::Any, name::Symbol) + t = (node, name) node.source_graph.node_obj_dict[t] = value + return +end + +function Base.getindex(node::OptiNode, name::Symbol) + t = (node,name) + return node.source_graph.node_obj_dict[t] end +### Node Variables + struct NodeVariableRef <: JuMP.AbstractVariableRef node::OptiNode index::MOI.VariableIndex @@ -39,13 +50,13 @@ function JuMP.backend(node::OptiNode) return JuMP.backend(node.source_graph.backend) end -function contained_optigraphs(node::OptiNode) +function containing_optigraphs(node::OptiNode) source_graph = node.source_graph + graphs = [source_graph] if haskey(source_graph.node_to_graphs, 
node) - return source_graph.node_to_graphs[node] - else - return OptiGraph[] + graphs = [graphs; source_graph.node_to_graphs[node]] end + return graphs end ### OptiEdge @@ -53,13 +64,33 @@ end struct OptiEdge{GT<:AbstractOptiGraph} <: JuMP.AbstractModel source_graph::GT label::String + nodes::OrderedSet{OptiNode} +end + +# const NodeOrEdge = Union{OptiNode,OptiEdge} + +# struct LinkConstraintRef +# edge::OptiEdge +# index::MOI.ConstraintIndex +# end +# Base.broadcastable(c::LinkConstraintRef) = Ref(c) + +function graph_backend(edge::OptiEdge) + return graph_backend(edge.source_graph) +end + +function JuMP.backend(edge::OptiEdge) + return JuMP.backend(edge.source_graph.backend) end -struct LinkConstraintRef - edge::OptiEdge - index::MOI.ConstraintIndex +function containing_optigraphs(edge::OptiEdge) + source_graph = edge.source_graph + graphs = [source_graph] + if haskey(source_graph.edge_to_graphs, edge) + graphs = [graphs; source_graph.edge_to_graphs[edge]] + end + return graphs end -Base.broadcastable(c::LinkConstraintRef) = Ref(c) ### OptiGraph @@ -69,12 +100,14 @@ mutable struct OptiGraph <: AbstractOptiGraph #<: JuMP.AbstractModel optiedges::Vector{OptiEdge} #Local optiedges subgraphs::Vector{OptiGraph} - node_to_graphs::OrderedDict{OptiNode,Vector{OptiGraph}} # track node membership in other graphs; nodes use this to query different backends + # track node membership in other graphs; nodes use this to query different backends + node_to_graphs::OrderedDict{OptiNode,Vector{OptiGraph}} + edge_to_graphs::OrderedDict{OptiEdge,Vector{OptiGraph}} backend::MOI.ModelLike - # objects on nodes - node_obj_dict::OrderedDict{Tuple{OptiNode,Symbol},Any} + node_obj_dict::OrderedDict{Tuple{OptiNode,Symbol},Any} # object dictionary for nodes + edge_obj_dict::OrderedDict{Tuple{OptiEdge,Symbol},Any} # object dictionary for edges obj_dict::Dict{Symbol,Any} ext::Dict{Symbol,Any} # extension information @@ -87,6 +120,8 @@ mutable struct OptiGraph <: AbstractOptiGraph #<: 
JuMP.AbstractModel optigraph.node_to_graphs = OrderedDict{OptiNode,Vector{OptiGraph}}() optigraph.node_obj_dict = OrderedDict{Tuple{OptiNode,Symbol},Any}() + optigraph.edge_to_graphs = OrderedDict{OptiEdge,Vector{OptiGraph}}() + optigraph.edge_obj_dict = OrderedDict{Tuple{OptiEdge,Symbol},Any}() optigraph.backend = GraphMOIBackend(optigraph) optigraph.obj_dict = Dict{Symbol,Any}() @@ -106,19 +141,19 @@ Base.print(io::IO, graph::OptiGraph) = Base.print(io, Base.string(graph)) Base.show(io::IO, graph::OptiGraph) = Base.print(io, graph) # TODO: PR for JuMP on name_to_register. This lets us overrride how objects get registered in OptiGraphs -function JuMP.name_to_register(node::OptiNode, name::Symbol) - return (node,name) -end +# function JuMP.name_to_register(node::OptiNode, name::Symbol) +# return (node,name) +# end ### Add Node function add_node(graph::OptiGraph; label::String="n$(length(graph.optinodes) + 1)") - optinode = OptiNode{OptiGraph}(graph, label) # , Dict{Symbol,Any}() + optinode = OptiNode{OptiGraph}(graph, label) push!(graph.optinodes, optinode) return optinode end -### Variables +### Node Variables """ JuMP.add_variable(node::OptiNode, v::JuMP.AbstractVariable, name::String="") @@ -161,31 +196,7 @@ end ### Constraints -# NOTE: Using an alias on ConstraintRef{M,C,S} causes issues with dispatching JuMP functions. I'm not sure it is really necessary vs just using ConstraintRef for dispatch. -# const NodeConstraintRef = JuMP.ConstraintRef{OptiNode, MOI.ConstraintIndex{F,S} where {F,S}, Shape where Shape <: JuMP.AbstractShape} - -function JuMP.num_constraints( - node::OptiNode, - ::Type{F}, - ::Type{S} -)::Int64 where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} - return MOI.get(JuMP.backend(node), MOI.NumberOfConstraints{F,S}()) -end - -""" - JuMP.add_constraint(node::OptiNode, con::JuMP.AbstractConstraint, base_name::String="") - -Add a constraint `con` to optinode `node`. This function supports use of the @constraint JuMP macro. 
-""" -function JuMP.add_constraint( - node::OptiNode, con::JuMP.AbstractConstraint, name::String="" -) - # TODO: determine whether `model_convert` is necessary? - con = JuMP.model_convert(node, con) - cref = _moi_add_node_constraint(node, con) - return cref -end - +# TODO: figure out if JuMP really needs this level of customization # Adapted from: https://github.com/jump-dev/JuMP.jl/blob/0df25a9185ceede762af533bc965c9374c97450c/src/aff_expr.jl#L633-L641 function MOI.ScalarAffineFunction( a::GenericAffExpr{C,<:NodeVariableRef}, @@ -213,17 +224,45 @@ function JuMP.GenericAffExpr{C,NodeVariableRef}( return aff end +### Node Constraints + +# NOTE: Using an alias on ConstraintRef{M,C,S} causes issues with dispatching JuMP functions. I'm not sure it is really necessary vs just using ConstraintRef for dispatch. +# const NodeConstraintRef = JuMP.ConstraintRef{OptiNode, MOI.ConstraintIndex{F,S} where {F,S}, Shape where Shape <: JuMP.AbstractShape} +# const NodeConstraintRef = JuMP.ConstraintRef{OptiNode, MOI.ConstraintIndex} + function JuMP.jump_function( - model::OptiNode, + node::OptiNode, f::MOI.ScalarAffineFunction{C}, ) where {C} - return JuMP.GenericAffExpr{C,NodeVariableRef}(model, f) + return JuMP.GenericAffExpr{C,NodeVariableRef}(node, f) end function MOI.get(node::OptiNode, attr::MOI.AbstractConstraintAttribute, ref::ConstraintRef) return MOI.get(graph_backend(node), attr, ref) end +function JuMP.num_constraints( + node::OptiNode, + ::Type{F}, + ::Type{S} +)::Int64 where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} + return MOI.get(JuMP.backend(node), MOI.NumberOfConstraints{F,S}()) +end + +""" + JuMP.add_constraint(node::OptiNode, con::JuMP.AbstractConstraint, base_name::String="") + +Add a constraint `con` to optinode `node`. This function supports use of the @constraint JuMP macro. +""" +function JuMP.add_constraint( + node::OptiNode, con::JuMP.AbstractConstraint, name::String="" +) + # TODO: determine whether `model_convert` is necessary? 
+ con = JuMP.model_convert(node, con) + cref = _moi_add_node_constraint(node, con) + return cref +end + """ JuMP.add_nonlinear_constraint(node::OptiNode, expr::Expr) @@ -244,6 +283,44 @@ function JuMP.add_nonlinear_expression(node::OptiNode, expr::Any) return JuMP.add_nonlinear_expression(jump_model(node), expr) end +### Add Edges + +function add_edge( + graph::OptiGraph, + nodes::OptiNode...; + label::String="e$(length(graph.optiedges) + 1)" +) + edge = OptiEdge{OptiGraph}(graph, label, OrderedSet(collect(nodes))) + push!(graph.optiedges, edge) + return edge +end + +function JuMP.num_constraints( + edge::OptiEdge, + ::Type{F}, + ::Type{S} +)::Int64 where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} + return MOI.get(JuMP.backend(edge), MOI.NumberOfConstraints{F,S}()) +end + +function JuMP.add_constraint( + edge::OptiEdge, con::JuMP.AbstractConstraint, name::String="" +) + con = JuMP.model_convert(edge, con) # converts coefficient and constant types + cref = _moi_add_edge_constraint(edge, con) + # TODO: set name + return cref +end + +function add_link_constraint( + graph::OptiGraph, con::JuMP.ScalarConstraint, name::String="" +) + nodes = get_nodes(con) + optiedge = add_edge(graph, nodes...) + cref = JuMP.add_constraint(optiedge, con, name) + return cref +end + ### private methods # function _moi_add_node_variable(