got multi-subgraph-backends working
jalving committed Jan 4, 2024
1 parent 06d2556 commit ed77656
Showing 16 changed files with 583 additions and 526 deletions.
1 change: 1 addition & 0 deletions Project.toml
@@ -8,6 +8,7 @@ version = "0.5.3"
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6"
HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
148 changes: 8 additions & 140 deletions src/Plasmo.jl
@@ -20,150 +20,18 @@ export OptiGraph
# import DataStructures.OrderedDict
# import Base: ==, string, print, show

abstract type AbstractOptiGraph <: JuMP.AbstractModel end

include("optigraph.jl")

include("backend.jl")
# export

# #################################
# # OptiGraph
# ################################
# AbstractOptiGraph,
# OptiGraph,
# OptiNode,
# OptiEdge,
# LinkConstraint,
# LinkConstraintRef,
# Partition,
# OptiGraphNLPEvaluator,
# add_node!,
# optinode,
# optinodes,
# all_nodes,
# optinode_by_index,
# num_nodes,
# num_all_nodes,
# optiedge,
# optiedges,
# all_edges,
# optiedge_by_index,
# num_edges,
# num_all_edges,
# add_subgraph!,
# subgraph,
# subgraphs,
# all_subgraphs,
# subgraph_by_index,
# num_subgraphs,
# num_all_subgraphs,
# has_subgraphs,
# optigraph_reference,
# @optinode,
# @linkconstraint,

# # linkconstraints
# linkconstraints,
# all_linkconstraints,
# num_linkconstraints,
# num_all_linkconstraints,
# num_linked_variables,

# # optinode
# jump_model,
# set_model,
# has_model,
# is_set_to_node,
# label,
# set_label,
# attached_node,
# set_attached_node,
# is_node_variable,
# is_linked_variable,

# # graph processing
# incident_edges,
# neighborhood,
# induced_edges,
# expand,
# induced_graph,
# apply_partition!,
# cross_edges,
# hierarchical_edges,
# global_edges,
# aggregate,
# aggregate!,

# # model functions
# num_all_variables,
# num_all_constraints,
# has_objective,
# has_nl_objective,
# has_node_objective,
# set_node_primals,
# set_node_duals,
# set_node_status,

# # hypergraph functions
# in_degree,
# out_degree,
# all_neighbors,
# induced_subgraph,
# neighbors,
# adjacency_matrix,
# incidence_matrix,

# # graph projections
# bipartite_graph,
# clique_graph,
# hyper_graph,
# edge_graph,
# edge_hyper_graph

#Abstract Types
# abstract type AbstractOptiGraph <: JuMP.AbstractModel end
# abstract type AbstractOptiEdge end
# abstract type AbstractLinkConstraintRef end
# abstract type AbstractLinkConstraint <: JuMP.AbstractConstraint end

# include("graph_representations/hypergraph.jl")

# include("graph_representations/bipartitegraph.jl")
include("optinode.jl")

# include("graph_representations/cliquegraph.jl")
include("optiedge.jl")

# include("moi_backend_node.jl")

# include("optinode.jl")

# include("optiedge.jl")

# include("moi_backend_graph.jl")

# include("optigraph.jl")

# include("macros.jl")

# include("aggregate.jl")

# include("aggregate_utils.jl")

# include("optimizer_interface.jl")

# include("graph_projections.jl")

# include("graph_functions.jl")

# include("nlp_evaluator.jl")
include("optigraph.jl")

# include("partition.jl")
include("graph_backend.jl")

# include("structure.jl")
include("optimizer_interface.jl")

# function __init__()
# @require KaHyPar = "2a6221f6-aa48-11e9-3542-2d9e0ef01880" include(
# "partition_interface/kahypar.jl"
# )
# end
include("jump_interop.jl")

end
end
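Read together, the deletions and additions leave src/Plasmo.jl as a short module skeleton. A minimal sketch of the file after this commit, assuming the using block above the hunk is unchanged and only the includes shown above remain:

module Plasmo

using JuMP
using MathOptInterface

abstract type AbstractOptiGraph <: JuMP.AbstractModel end

include("optinode.jl")             # node types, loaded ahead of the graph type
include("optiedge.jl")             # edge types and link constraints
include("optigraph.jl")            # the OptiGraph type itself
include("graph_backend.jl")        # renamed from backend.jl in this commit
include("optimizer_interface.jl")
include("jump_interop.jl")

end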
1 change: 1 addition & 0 deletions src/aggregate.jl
@@ -0,0 +1 @@
aggregate.jl
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
21 changes: 16 additions & 5 deletions src/dev.jl → src/dev1.jl
@@ -1,25 +1,36 @@
using Plasmo
graph = OptiGraph(;label=:g1)
using HiGHS

graph = OptiGraph(; name=:g1)

n1 = Plasmo.add_node(graph)
@variable(n1, x >= 0)
@variable(n1, y >= 0)
@constraint(n1, ref1, x+y==2)
@constraint(n1, ref1, x+y <= 10)

n2 = Plasmo.add_node(graph)
@variable(n2, x >= 1)
@variable(n2, y >= 2)
@constraint(n2, ref2, x+y==4)
@constraint(n2, ref2, x+y <= 4)

# linking constraint
edge1 = Plasmo.add_edge(graph, n1, n2)
@constraint(edge1, ref3, n1[:x] == n2[:x])

@objective(graph, Min, n1[:x] + n2[:x])

objective_function(graph)
obj = objective_function(graph)


# TODO
# TODO:
#@linkconstraint(graph, n1[:x] + n2[:x] == 2)

# TODO:
set_optimizer(graph, HiGHS.Optimizer)
optimize!(graph)

# TODO: nonlinear

# TODO: build backend from multiple graphs

# TODO: hypergraph interface
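After optimize!(graph) returns, the obvious next step for this dev script is to read the solution back. A minimal sketch of that, assuming the JuMP-style accessors used elsewhere in these files (termination_status, value, objective_value) are forwarded through the graph; none of these calls appear in the commit:

# hypothetical follow-up to src/dev1.jl
println(termination_status(graph))
println(value(n1[:x]), " ", value(n2[:x]))   # ref3 links these, so they match at the solution
println(objective_value(graph))              # n1[:x] + n2[:x]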
56 changes: 56 additions & 0 deletions src/dev2.jl
@@ -0,0 +1,56 @@
# dev file for subgraphs

using Plasmo
using HiGHS

graph = OptiGraph(; name=:g1)

sg1 = Plasmo.add_subgraph(graph; name=:sg1, optimizer_graph=graph)

# node 1
n1 = Plasmo.add_node(sg1)
@variable(n1, x >= 0)
@variable(n1, y >= 0)
@constraint(n1, ref1, x+y <= 10)

# node 2
n2 = Plasmo.add_node(sg1)
@variable(n2, x >= 1)
@variable(n2, y >= 2)
@constraint(n2, ref2, x+y <= 4)

# linking constraint
edge1 = Plasmo.add_edge(sg1, n1, n2)
@constraint(edge1, ref_edge_1, n1[:x] == n2[:x])

sg2 = Plasmo.add_subgraph(graph; name=:sg2)

# node 3
n3 = Plasmo.add_node(sg2)
@variable(n3, x >= 0)
@variable(n3, y >= 0)
@constraint(n3, ref3, x+y <= 10)

# node 4
n4 = Plasmo.add_node(sg2)
@variable(n4, x >= 1)
@variable(n4, y >= 2)
@constraint(n4, ref4, x+y <= 4)

# linking constraint
edge2 = Plasmo.add_edge(sg2, n3, n4)
@constraint(edge2, ref_edge_2, n3[:x] == n4[:x])

@objective(graph, Min, n1[:x] + n2[:x] + n3[:x] + n4[:x])

obj = objective_function(graph)


# TODO:
#@linkconstraint(graph, n1[:x] + n2[:x] == 2)

# TODO:
set_optimizer(graph, HiGHS.Optimizer)
optimize!(graph)
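dev2.jl exercises the feature named in the commit title: sg1 is added with optimizer_graph=graph, so its nodes presumably share the parent graph's backend, while sg2 (added without the keyword) carries its own. A minimal sketch of checking the combined solve across both subgraphs, again assuming the same JuMP-style accessors as dev1.jl; nothing below appears in the commit:

# hypothetical follow-up to src/dev2.jl
println(termination_status(graph))
println(value(n1[:x]) == value(n2[:x]))   # linked by ref_edge_1 inside sg1
println(value(n3[:x]) == value(n4[:x]))   # linked by ref_edge_2 inside sg2
println(objective_value(graph))           # sums terms from nodes in both subgraphs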
55 changes: 4 additions & 51 deletions src/backend.jl → src/graph_backend.jl
@@ -337,41 +337,10 @@ function _moi_set_objective_function(
return
end

# function _set_backend_objective(
# graph::OptiGraph, obj::JuMP.GenericQuadExpr{Float64,VariableRef}
# )
# graph_backend = JuMP.backend(graph)
# moi_obj = moi_function(obj)
# for (i, terms) in enumerate(quad_terms(obj))
# term1 = terms[2]
# term2 = terms[3]
# node = optinode(term1)
# @assert optinode(term1) == optinode(term2)
# moi_term1 = index(term1)
# moi_term2 = index(term2)
# node_idx_map = backend(node).optimizers[graph.id].node_to_optimizer_map
# new_moi_idx_1 = node_idx_map[moi_term1]
# new_moi_idx_2 = node_idx_map[moi_term2]
# moi_obj = _swap_quad_term!(moi_obj, i, new_moi_idx_1, new_moi_idx_2)
# end

# for (i, terms) in enumerate(linear_terms(obj))
# term = terms[2]
# moi_term = index(term)
# node = optinode(term)
# node_idx_map = backend(node).optimizers[graph.id].node_to_optimizer_map
# new_moi_idx = node_idx_map[moi_term]
# moi_obj = _swap_linear_term!(moi_obj, i, new_moi_idx)
# end

# MOI.set(graph_backend.optimizer, MOI.ObjectiveSense(), MOI.MIN_SENSE)
# MOI.set(
# graph_backend.optimizer,
# MOI.ObjectiveFunction{MOI.ScalarQuadraticFunction{Float64}}(),
# moi_obj,
# )
# return nothing
# end
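# delegate the solve to the graph's underlying MOI backend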
function MOI.optimize!(graph_backend::GraphMOIBackend)
MOI.optimize!(graph_backend.moi_backend)
return nothing
end

### JuMP interoperability

@@ -394,22 +363,6 @@ function _moi_add_constraint(
return MOI.add_constraint(model, f, s)
end

### TODO

# function MOI.optimize!(graph_backend::GraphMOIBackend)
# # # TODO: support modes
# # # if graph_backend.mode == MOIU.AUTOMATIC && graph_backend.state == MOIU.EMPTY_OPTIMIZER
# # # normally the `attach_optimizer` gets called in a higher scope, but we can attach here for testing purposes
# # if MOIU.state(graph_backend) == MOIU.EMPTY_OPTIMIZER
# # MOIU.attach_optimizer(graph_backend)
# # else
# # @assert MOIU.state(graph_backend) == MOIU.ATTACHED_OPTIMIZER
# # end
# MOI.optimize!(graph_backend.moi_backend)
# return nothing
# end


### Helpful utilities

# function _swap_indices(variable::MOI.VariableIndex, idxmap::MOIU.IndexMap)
Diff for the remaining changed files not shown.
