From ca9fdf2259634f5707b265a41d3be57dcd009fbe Mon Sep 17 00:00:00 2001
From: jalving
Date: Sat, 6 Jan 2024 11:51:15 -0800
Subject: [PATCH] fix issue with wrong nonlinear variable indices

---
 src/aggregate.jl           | 51 ++++++++++++++++++++++-
 src/dev1.jl                |  2 -
 src/dev2.jl                |  2 +-
 src/dev3.jl                | 83 ++++++++++++++++++++++++++++++++------
 src/graph_backend.jl       | 70 +++++++-------------------
 src/jump_interop.jl        | 12 ++----
 src/optigraph.jl           | 40 ++++++++++++++----
 src/optimizer_interface.jl |  8 ++++
 8 files changed, 180 insertions(+), 88 deletions(-)

diff --git a/src/aggregate.jl b/src/aggregate.jl
index ea5bb48..e16fe44 100644
--- a/src/aggregate.jl
+++ b/src/aggregate.jl
@@ -1 +1,50 @@
-aggregate.jl
\ No newline at end of file
+"""
+    aggregate_backends!(graph::OptiGraph)
+
+Aggregate the moi backends from each subgraph within `graph` to create a single backend.
+"""
+function aggregate_backends!(graph::OptiGraph)
+    dest = JuMP.backend(graph)
+
+end
+
+### Helpful utilities
+
+# """
+#     append_to_backend!(dest::MOI.ModelLike, src::MOI.ModelLike)
+
+# Copy the underlying model from `src` into `dest`, but ignore attributes
+# such as objective function and objective sense
+# """
+# function append_to_backend!(dest::MOI.ModelLike, src::MOI.ModelLike)
+#     vis_src = MOI.get(src, MOI.ListOfVariableIndices()) #returns vector of MOI.VariableIndex
+#     index_map = MOIU.IndexMap()
+
+
+#     # has_nlp = MOI.NLPBlock() in MOI.get(src, MOI.ListOfModelAttributesSet())
+#     # constraints_not_added = if has_nlp
+#     constraints_not_added = Any[
+#         MOI.get(src, MOI.ListOfConstraintIndices{F,S}()) for
+#         (F, S) in MOI.get(src, MOI.ListOfConstraintTypesPresent()) if
+#         MOIU._is_variable_function(F)
+#     ]
+#     # else
+#     #     Any[
+#     #         MOIU._try_constrain_variables_on_creation(dest, src, index_map, S)
+#     #         for S in MOIU.sorted_variable_sets_by_cost(dest, src)
+#     #     ]
+#     # end
+
+#     # Copy free variables into graph optimizer
+#     MOI.Utilities._copy_free_variables(dest, index_map, vis_src)
+
+#     # Copy variable attributes (e.g. name, and VariablePrimalStart())
+#     MOI.Utilities.pass_attributes(dest, src, index_map, vis_src)
+
+#     # Normally this copies ObjectiveSense() and ObjectiveFunction(), but we don't want to do that here
+#     #MOI.Utilities.pass_attributes(dest, src, idxmap)
+
+#     MOI.Utilities._pass_constraints(dest, src, index_map, constraints_not_added)
+
+#     return index_map #return an idxmap for each source model
+# end
\ No newline at end of file
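A hypothetical sketch (not part of this commit) of how the new
`aggregate_backends!` stub might be completed, assuming the commented-out
`append_to_backend!` utility above is restored; the depth-first recursion and
the index-map bookkeeping are assumptions, not code from this patch:

    function aggregate_backends!(graph::OptiGraph)
        dest = graph_backend(graph).moi_backend
        for subgraph in graph.subgraphs
            # aggregate nested subgraphs first so each child backend is complete
            aggregate_backends!(subgraph)
            src = graph_backend(subgraph).moi_backend
            # copy variables/constraints (but not objective attributes) into
            # `dest`; the returned index map would translate subgraph variable
            # indices into the parent graph's indices
            index_map = append_to_backend!(dest, src)
        end
        return nothing
    end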
diff --git a/src/dev1.jl b/src/dev1.jl
index d26981d..91d729a 100644
--- a/src/dev1.jl
+++ b/src/dev1.jl
@@ -15,13 +15,11 @@ n2 = Plasmo.add_node(graph)
 @constraint(n2, ref2, x+y <= 4)
 @constraint(n2, nlref, x^3 + y <= 10)
 
-
 # add external function
 f(x::Real) = x^2
 @operator(graph, op_f, 1, f)
 @expression(graph, z, op_f(x))
 
-
 # linking constraint
 edge1 = Plasmo.add_edge(graph, n1, n2)
 @constraint(edge1, ref3a, n1[:x] == n2[:x])
diff --git a/src/dev2.jl b/src/dev2.jl
index da2c05b..5b70294 100644
--- a/src/dev2.jl
+++ b/src/dev2.jl
@@ -1,4 +1,4 @@
-# dev file for subgraphs
+# dev file for subgraphs; single backend
 using Plasmo
 using HiGHS
 
diff --git a/src/dev3.jl b/src/dev3.jl
index b330843..ce0cfe5 100644
--- a/src/dev3.jl
+++ b/src/dev3.jl
@@ -1,18 +1,75 @@
-using JuMP
-using Ipopt
+# dev file for subgraphs; different backends
+using Plasmo
+using HiGHS
 
-m = Model()
-@variable(m, x >= 0)
-@variable(m, y >= 0)
-@constraint(m, ref, x^3 + y^3 >= 0)
+graph = OptiGraph(; name=:g1)
 
-jump_func = jump_function(constraint_object(ref))
-moi_func = moi_function(constraint_object(ref))
+# subgraph1
+sg1 = Plasmo.add_subgraph(graph; name=:sg1)
+# node 1
+n1 = Plasmo.add_node(sg1)
+@variable(n1, x >= 0)
+@variable(n1, y >= 0)
+@constraint(n1, ref1, n1[:x] + n1[:y] <= 10)
 
-f(x::Real) = x^2
-@operator(m, op_f, 1, f)
-@expression(m, z, op_f(x))
+# node 2
+n2 = Plasmo.add_node(sg1)
+@variable(n2, x >= 1)
+@variable(n2, y >= 2)
+@constraint(n2, ref2, n2[:x] + n2[:y] <= 4)
 
 
-set_optimizer(m, Ipopt.Optimizer)
-JuMP.optimize!(m)
\ No newline at end of file
+# linking constraint on subgraph1
+edge1 = Plasmo.add_edge(sg1, n1, n2)
+@constraint(edge1, ref_edge_1, n1[:x] == n2[:x])
+
+# subgraph 2
+sg2 = Plasmo.add_subgraph(graph; name=:sg2)
+
+# node 3
+n3 = Plasmo.add_node(sg2)
+@variable(n3, x >= 0)
+@variable(n3, y >= 0)
+@constraint(n3, ref3, n3[:x] + n3[:y] <= 10)
+
+# node 4
+n4 = Plasmo.add_node(sg2)
+@variable(n4, x >= 1)
+@variable(n4, y >= 2)
+@constraint(n4, ref4, n4[:x] + n4[:y] <= 4)
+
+# linking constraint on subgraph2
+edge2 = Plasmo.add_edge(sg2, n3, n4)
+@constraint(edge2, ref_edge_2, n3[:x] == n4[:x])
+
+# link across subgraphs
+edge3 = Plasmo.add_edge(graph, n2, n4)
+@constraint(edge3, ref_edge_3, n2[:y] == n4[:y])
+
+
+# optimize subgraph1
+@objective(sg1, Min, n1[:x] + n2[:y])
+set_optimizer(sg1, HiGHS.Optimizer)
+optimize!(sg1)
+
+# optimize subgraph2
+@objective(sg2, Min, n3[:x] + n4[:y])
+set_optimizer(sg2, HiGHS.Optimizer)
+optimize!(sg2)
+
+
+# optimize complete graph
+# @objective(graph, Max, n1[:x] + n2[:x] + n3[:x] + n4[:x])
+# set_optimizer(graph, HiGHS.Optimizer)
+# optimize!(graph)
+
+
+
+# @show value(n1[:x])
+# @show value(n2[:x])
+# @show value(n3[:x])
+# @show value(n4[:x])
+# @show value(n2[:y])
+# @show value(n4[:y])
+
+# println(objective_value(graph))
\ No newline at end of file
diff --git a/src/graph_backend.jl b/src/graph_backend.jl
index 8aba48d..4655f75 100644
--- a/src/graph_backend.jl
+++ b/src/graph_backend.jl
@@ -183,13 +183,13 @@ function _moi_add_node_constraint(
     cref = ConstraintRef(node, constraint_index, JuMP.shape(con))
     for graph in containing_optigraphs(node)
-        # TODO: i think we need to make copies here for moi_funcs
-        # moi_func_graph = copy(moi_func)
+        moi_func_graph = deepcopy(moi_func)
+
         # update func variable indices
-        _update_moi_func!(graph_backend(graph), moi_func, jump_func)
+        _update_moi_func!(graph_backend(graph), moi_func_graph, jump_func)
 
         # add to optinode backend
-        _add_node_constraint_to_backend(graph_backend(graph), cref, moi_func, moi_set)
+        _add_node_constraint_to_backend(graph_backend(graph), cref, moi_func_graph, moi_set)
     end
     return cref
 end
@@ -219,6 +219,7 @@ end
 
 ### MOI Utilities
 
+# TODO: use copies for updating
 """
     _update_moi_func!(
         backend::GraphMOIBackend,
@@ -280,7 +281,7 @@ function _update_moi_func!(
     for i = 1:length(jump_func.args)
         jump_arg = jump_func.args[i]
         moi_arg = moi_func.args[i]
-        if typeof(jump_arg) == JuMP.GenericNonlinearExpr
+        if jump_arg isa JuMP.GenericNonlinearExpr
             _update_moi_func!(backend, moi_arg, jump_arg)
         elseif typeof(jump_arg) == NodeVariableRef
             moi_func.args[i] = backend.node_to_graph_map[jump_arg]
@@ -289,6 +290,9 @@ function _update_moi_func!(
     return
 end
 
+### add variables to a backend for the purpose of creating linking constraints
+# or objectives across subgraphs
+
 function _add_backend_variables(
     backend::GraphMOIBackend,
     jump_func::JuMP.GenericAffExpr
@@ -328,7 +332,6 @@ function _add_backend_variables(
             push!(vars, jump_arg)
         end
     end
-
     vars_to_add = setdiff(vars, keys(backend.node_to_graph_map.var_map))
     for var in vars_to_add
         _add_variable_to_backend(backend, var)
@@ -358,11 +361,13 @@ function _moi_add_edge_constraint(
         # add backend variables if linking across optigraphs
         _add_backend_variables(graph_backend(graph), jump_func)
 
+        moi_func_graph = deepcopy(moi_func)
+
         # update the moi function variable indices
-        _update_moi_func!(graph_backend(graph), moi_func, jump_func)
+        _update_moi_func!(graph_backend(graph), moi_func_graph, jump_func)
 
         # add the constraint to the backend
-        _add_edge_constraint_to_backend(graph_backend(graph), cref, moi_func, moi_set)
+        _add_edge_constraint_to_backend(graph_backend(graph), cref, moi_func_graph, moi_set)
     end
 
     return cref
@@ -445,51 +450,4 @@ end
 function MOI.optimize!(graph_backend::GraphMOIBackend)
     MOI.optimize!(graph_backend.moi_backend)
     return nothing
-end
-
-### Helpful utilities
-
-# """
-#     append_to_backend!(dest::MOI.ModelLike, src::MOI.ModelLike)
-
-# Copy the underylying model from `src` into `dest`, but ignore attributes
-# such as objective function and objective sense
-# """
-# function append_to_backend!(dest::MOI.ModelLike, src::MOI.ModelLike)
-#     vis_src = MOI.get(src, MOI.ListOfVariableIndices()) #returns vector of MOI.VariableIndex
-#     index_map = MOIU.IndexMap()
-
-#     # per the comment in MOI:
-#     # "The `NLPBlock` assumes that the order of variables does not change (#849)
-#     # Therefore, all VariableIndex and VectorOfVariable constraints are added
-#     # seprately, and no variables constrained-on-creation are added.""
-#     # Consequently, Plasmo avoids using the constrained-on-creation approach because
-#     # of the way it constructs the NLPBlock for the optimizer.
-
-#     # has_nlp = MOI.NLPBlock() in MOI.get(src, MOI.ListOfModelAttributesSet())
-#     # constraints_not_added = if has_nlp
-#     constraints_not_added = Any[
-#         MOI.get(src, MOI.ListOfConstraintIndices{F,S}()) for
-#         (F, S) in MOI.get(src, MOI.ListOfConstraintTypesPresent()) if
-#         MOIU._is_variable_function(F)
-#     ]
-#     # else
-#     #     Any[
-#     #         MOIU._try_constrain_variables_on_creation(dest, src, index_map, S)
-#     #         for S in MOIU.sorted_variable_sets_by_cost(dest, src)
-#     #     ]
-#     # end
-
-#     # Copy free variables into graph optimizer
-#     MOI.Utilities._copy_free_variables(dest, index_map, vis_src)
-
-#     # Copy variable attributes (e.g. name, and VariablePrimalStart())
-#     MOI.Utilities.pass_attributes(dest, src, index_map, vis_src)
-
-#     # Normally this copies ObjectiveSense() and ObjectiveFunction(), but we don't want to do that here
-#     #MOI.Utilities.pass_attributes(dest, src, idxmap)
-
-#     MOI.Utilities._pass_constraints(dest, src, index_map, constraints_not_added)
-
-#     return index_map #return an idxmap for each source model
-# end
\ No newline at end of file
+end
\ No newline at end of file
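Context for the `deepcopy` fix above: `_update_moi_func!` rewrites a MOI
function's variable indices in place for one particular graph backend, and a
node or edge constraint is installed into every graph that contains it. With a
single shared function object, a later remap can silently change the indices an
earlier graph already stored. A sketch of the aliasing (`g1` and `g2` are
hypothetical graphs containing the same node; the functions are from this
patch):

    # moi_func starts out holding node-level variable indices
    _update_moi_func!(graph_backend(g1), moi_func, jump_func) # now g1 indices
    _update_moi_func!(graph_backend(g2), moi_func, jump_func) # any reference
                                                              # g1 kept now
                                                              # sees g2 indices

    # the fix: remap a fresh copy per containing graph
    moi_func_graph = deepcopy(moi_func)
    _update_moi_func!(graph_backend(g1), moi_func_graph, jump_func)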
diff --git a/src/jump_interop.jl b/src/jump_interop.jl
index 8fc4082..35eb978 100644
--- a/src/jump_interop.jl
+++ b/src/jump_interop.jl
@@ -63,7 +63,6 @@ function _moi_add_constraint(
 end
 
 ### Affine Expressions
-
 # adapted from: https://github.com/jump-dev/JuMP.jl/blob/master/src/aff_expr.jl
 
 function _assert_isfinite(a::JuMP.GenericAffExpr)
@@ -128,7 +127,6 @@ function JuMP.GenericAffExpr{C,NodeVariableRef}(
     edge::OptiEdge,
     f::MOI.ScalarAffineFunction,
 ) where {C}
-    println(f)
     aff = GenericAffExpr{C,NodeVariableRef}(f.constant)
     # build JuMP Affine Expression over edge variables
     for t in f.terms
@@ -173,7 +171,6 @@ function JuMP.GenericAffExpr{C,NodeVariableRef}(
 end
 
 ### Quadratic Expressions
-
 # adapted from: https://github.com/jump-dev/JuMP.jl/blob/master/src/quad_expr.jl
 
 function _moi_quadratic_term(t::Tuple)
@@ -224,8 +221,6 @@ function JuMP.GenericQuadExpr{C,NodeVariableRef}(
     for t in f.quadratic_terms
         v1 = graph_backend(node).graph_to_node_map[t.variable_1].index
         v2 = graph_backend(node).graph_to_node_map[t.variable_2].index
-        # v1 = t.variable_1
-        # v2 = t.variable_2
         coef = t.coefficient
         if v1 == v2
             coef /= 2
@@ -345,7 +340,6 @@ function JuMP.GenericQuadExpr{C,NodeVariableRef}(
 end
 
 ### Nonlinear Expressions
-
 # adapted from: https://github.com/jump-dev/JuMP.jl/blob/master/src/nlp_expr.jl
 
 # OptiNode
@@ -354,7 +348,9 @@ JuMP.variable_ref_type(::Type{OptiNode{OptiGraph}}) = NodeVariableRef
 
 JuMP.jump_function(::OptiNode, x::Number) = convert(Float64, x)
 
 function JuMP.jump_function(node::OptiNode, vidx::MOI.VariableIndex)
-    return NodeVariableRef(node, vidx)
+    gb = graph_backend(node)
+    node_var = gb.graph_to_node_map[vidx]
+    return NodeVariableRef(node, node_var.index)
 end
 
 function JuMP.jump_function(node::OptiNode, f::MOI.ScalarNonlinearFunction)
@@ -445,4 +441,4 @@ function JuMP.jump_function(graph::OptiGraph, f::MOI.ScalarNonlinearFunction)
         end
     end
     return ret
-end
+end
\ No newline at end of file
diff --git a/src/optigraph.jl b/src/optigraph.jl
index ffaae2c..58040c8 100644
--- a/src/optigraph.jl
+++ b/src/optigraph.jl
@@ -1,15 +1,16 @@
 mutable struct OptiGraph <: AbstractOptiGraph
     label::Symbol
 
-    # topology
-    optinodes::Vector{OptiNode} #Local optinodes
-    optiedges::Vector{OptiEdge} #Local optiedges
-    subgraphs::Vector{OptiGraph}
-
+    # topology; TODO: use OrderedSets
+    optinodes::Vector{OptiNode}  # local optinodes
+    optiedges::Vector{OptiEdge}  # local optiedges
+    subgraphs::Vector{OptiGraph} # local subgraphs
+
+    # subgraphs keep a reference to their parent
     parent_graph::Union{Nothing,OptiGraph}
 
     # it is possible nodes and edges may use a parent graph as their model backend
+    # this is the case if constructing an optigraph from subgraphs
    optimizer_graph::OptiGraph
 
     # track node membership in other graphs; nodes use this to query different backends
@@ -52,6 +53,7 @@ mutable struct OptiGraph <: AbstractOptiGraph
     end
 end
 
+# TODO: numerical precision like JuMP Models do
 # JuMP.value_type(::Type{OptiGraph{T}}) where {T} = T
 
 function graph_backend(graph::OptiGraph)
@@ -135,17 +137,41 @@ end
 
 function add_subgraph(
     graph::OptiGraph;
-    optimizer_graph=graph,
+    optimizer_graph=nothing,
     name::Symbol=Symbol(:sg,gensym())
 )
     subgraph = OptiGraph(; name=name)
     subgraph.parent_graph=graph
-    # TODO check provided model backend graph makes sense
-    subgraph.optimizer_graph = optimizer_graph
+    if optimizer_graph != nothing
+        if optimizer_graph in traverse_parents(subgraph)
+            subgraph.optimizer_graph = optimizer_graph
+        else
+            error("Invalid optigraph passed as `optimizer_graph`")
+        end
+    else
+        subgraph.optimizer_graph = subgraph
+    end
     push!(graph.subgraphs, subgraph)
     return subgraph
 end
+
+function traverse_parents(graph::OptiGraph)
+    parents = OptiGraph[]
+    if graph.parent_graph != nothing
+        push!(parents, graph.parent_graph)
+        append!(parents, traverse_parents(graph.parent_graph))
+    end
+    return parents
+end
+
+function _optimizer_has_subgraphs(graph::OptiGraph)
+    if all(sg -> sg.optimizer_graph == graph, graph.subgraphs)
+        return true
+    else
+        return false
+    end
+end
 
 ### Objective Function
 
 function JuMP.objective_function(
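Hypothetical usage of the new `optimizer_graph` validation (illustrative only;
`add_subgraph` and `traverse_parents` are from this patch, the graph names are
made up): a subgraph may use any graph on its parent chain as its backend,
otherwise it defaults to itself.

    g = OptiGraph(; name=:parent)
    sg = add_subgraph(g; name=:child)          # defaults to its own backend
    sg2 = add_subgraph(sg; optimizer_graph=g)  # ok: g is on sg2's parent chain
    other = OptiGraph(; name=:other)
    add_subgraph(g; optimizer_graph=other)     # errors: `other` is not a parent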
diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl
index aa3f34c..a7b32f7 100644
--- a/src/optimizer_interface.jl
+++ b/src/optimizer_interface.jl
@@ -56,6 +56,8 @@ function JuMP.optimize!(
     #     )
     #     MOI.set(model, MOI.NLPBlock(), MOI.NLPBlockData(evaluator))
     # end
+
+    # TODO: optimize hooks
     # If the user or an extension has provided an optimize hook, call
     # that instead of solving the model ourselves
     # if !ignore_optimize_hook
@@ -70,6 +72,12 @@ function JuMP.optimize!(
     if JuMP.mode(graph) != DIRECT && MOIU.state(JuMP.backend(graph)) == MOIU.NO_OPTIMIZER
         throw(JuMP.NoOptimizer())
     end
+
+    # TODO: check subgraphs; determine whether we need to aggregate
+    # if !(_optimizer_has_subgraphs(graph))
+    #     aggregate_backends!(graph)
+    # end
+
     try
         MOI.optimize!(JuMP.backend(graph))
     catch err