Commit ca9fdf2

fix issue with wrong nonlinear variable indices

jalving committed Jan 6, 2024
1 parent cc32cdd commit ca9fdf2
Showing 8 changed files with 180 additions and 88 deletions.
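The substance of the fix: `_moi_add_node_constraint` and `_moi_add_edge_constraint` rewrote the variable indices of a single shared `moi_func` in place for every graph containing the node or edge, so each graph after the first received indices already remapped for a different backend, and `jump_function` assumed graph-level variable indices were node-local. The changes below take a `deepcopy` per graph before remapping and translate indices through `graph_to_node_map`. A minimal sketch of the in-place failure mode, using hypothetical stand-in types rather than Plasmo's backend structs:

```julia
# Hypothetical stand-in for a graph backend's variable map; Plasmo's real
# code maps MOI.VariableIndex values through node_to_graph_map.
struct SketchGraph
    var_map::Dict{Int,Int}  # node-local index => graph-global index
end

remap!(g::SketchGraph, args) = map!(i -> g.var_map[i], args, args)

g1 = SketchGraph(Dict(1 => 101, 2 => 102))
g2 = SketchGraph(Dict(1 => 201, 2 => 202))

# buggy pattern: one shared argument vector mutated for every graph.
shared = [1, 2]
remap!(g1, shared)      # shared is now [101, 102], already graph-1 indices
# remap!(g2, shared)    # would look up key 101 in g2's map: wrong indices

# fixed pattern, as in the diff: copy before rewriting
node_args = [1, 2]
for g in (g1, g2)
    args = deepcopy(node_args)  # each backend remaps its own copy
    remap!(g, args)
end
```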
51 changes: 50 additions & 1 deletion src/aggregate.jl
@@ -1 +1,50 @@
aggregate.jl
"""
aggregate_backends!(graph::OptiGraph)
Aggregate the MOI backends from each subgraph within `graph` to create a single backend.
"""
function aggregate_backends!(graph::OptiGraph)
dest = JuMP.backend(graph)

end

### Helpful utilities

# """
# append_to_backend!(dest::MOI.ModelLike, src::MOI.ModelLike)

# Copy the underlying model from `src` into `dest`, but ignore attributes
# such as the objective function and objective sense
# """
# function append_to_backend!(dest::MOI.ModelLike, src::MOI.ModelLike)
# vis_src = MOI.get(src, MOI.ListOfVariableIndices()) #returns vector of MOI.VariableIndex
# index_map = MOIU.IndexMap()


# # has_nlp = MOI.NLPBlock() in MOI.get(src, MOI.ListOfModelAttributesSet())
# # constraints_not_added = if has_nlp
# constraints_not_added = Any[
# MOI.get(src, MOI.ListOfConstraintIndices{F,S}()) for
# (F, S) in MOI.get(src, MOI.ListOfConstraintTypesPresent()) if
# MOIU._is_variable_function(F)
# ]
# # else
# # Any[
# # MOIU._try_constrain_variables_on_creation(dest, src, index_map, S)
# # for S in MOIU.sorted_variable_sets_by_cost(dest, src)
# # ]
# # end

# # Copy free variables into graph optimizer
# MOI.Utilities._copy_free_variables(dest, index_map, vis_src)

# # Copy variable attributes (e.g. name, and VariablePrimalStart())
# MOI.Utilities.pass_attributes(dest, src, index_map, vis_src)

# # Normally this copies ObjectiveSense() and ObjectiveFunction(), but we don't want to do that here
# #MOI.Utilities.pass_attributes(dest, src, idxmap)

# MOI.Utilities._pass_constraints(dest, src, index_map, constraints_not_added)

# return index_map #return an idxmap for each source model
# end
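For orientation, a sketch of how the stub above might eventually drive the commented-out helper; the `subgraphs` accessor and the use of the returned `MOIU.IndexMap` are assumptions, not something this commit implements:

```julia
# Hypothetical completion of aggregate_backends!, assuming append_to_backend!
# is uncommented; the subgraph iteration and backend access names are assumed.
function aggregate_backends_sketch!(graph::OptiGraph)
    dest = JuMP.backend(graph)
    for sub in Plasmo.subgraphs(graph)
        src = JuMP.backend(sub)
        index_map = append_to_backend!(dest, src)
        # each subgraph's IndexMap would let the parent graph translate
        # node variables and constraints into the aggregated backend
    end
    return dest
end
```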
2 changes: 0 additions & 2 deletions src/dev1.jl
@@ -15,13 +15,11 @@ n2 = Plasmo.add_node(graph)
@constraint(n2, ref2, x+y <= 4)
@constraint(n2, nlref, x^3 + y <= 10)


# add external function
f(x::Real) = x^2
@operator(graph, op_f, 1, f)
@expression(graph, z, op_f(x))


# linking constraint
edge1 = Plasmo.add_edge(graph, n1, n2)
@constraint(edge1, ref3a, n1[:x] == n2[:x])
2 changes: 1 addition & 1 deletion src/dev2.jl
@@ -1,4 +1,4 @@
# dev file for subgraphs
# dev file for subgraphs; single backend
using Plasmo
using HiGHS

83 changes: 70 additions & 13 deletions src/dev3.jl
@@ -1,18 +1,75 @@
using JuMP
using Ipopt
# dev file for subgraphs; different backends
using Plasmo
using HiGHS

m = Model()
@variable(m, x >= 0)
@variable(m, y >= 0)
@constraint(m, ref, x^3 + y^3 >= 0)
graph = OptiGraph(; name=:g1)

jump_func = jump_function(constraint_object(ref))
moi_func = moi_function(constraint_object(ref))
# subgraph1
sg1 = Plasmo.add_subgraph(graph; name=:sg1)

# node 1
n1 = Plasmo.add_node(sg1)
@variable(n1, x >= 0)
@variable(n1, y >= 0)
@constraint(n1, ref1, n1[:x] + n1[:y] <= 10)

f(x::Real) = x^2
@operator(m, op_f, 1, f)
@expression(m, z, op_f(x))
#node 2
n2 = Plasmo.add_node(sg1)
@variable(n2, x >= 1)
@variable(n2, y >= 2)
@constraint(n2, ref2, n2[:x] + n2[:y] <= 4)

set_optimizer(m, Ipopt.Optimizer)
JuMP.optimize!(m)
# linking constraint on subgraph1
edge1 = Plasmo.add_edge(sg1, n1, n2)
@constraint(edge1, ref_edge_1, n1[:x] == n2[:x])

# subgraph 2
sg2 = Plasmo.add_subgraph(graph; name=:sg2)

# node 3
n3 = Plasmo.add_node(sg2)
@variable(n3, x >= 0)
@variable(n3, y >= 0)
@constraint(n3, ref3, n3[:x] + n3[:y] <= 10)

#node 4
n4 = Plasmo.add_node(sg2)
@variable(n4, x >= 1)
@variable(n4, y >= 2)
@constraint(n4, ref4, n4[:x] + n4[:y] <= 4)

# linking constraint on subgraph2
edge2 = Plasmo.add_edge(sg2, n3, n4)
@constraint(edge2, ref_edge_2, n3[:x] == n4[:x])

# link across subgraphs
edge3 = Plasmo.add_edge(graph, n2, n4)
@constraint(edge3, ref_edge_3, n2[:y] == n4[:y])


# optimize subgraph1
@objective(sg1, Min, n1[:x] + n2[:y])
set_optimizer(sg1, HiGHS.Optimizer)
optimize!(sg1)

# optimize subgraph2
@objective(sg2, Min, n3[:x] + n4[:y])
set_optimizer(sg2, HiGHS.Optimizer)
optimize!(sg2)


# optimize complete graph
# @objective(graph, Max, n1[:x] + n2[:x] + n3[:x] + n4[:x])
# set_optimizer(graph, HiGHS.Optimizer)
# optimize!(graph)



# @show value(n1[:x])
# @show value(n2[:x])
# @show value(n3[:x])
# @show value(n4[:x])
# @show value(n2[:y])
# @show value(n4[:y])

# println(objective_value(graph))
70 changes: 14 additions & 56 deletions src/graph_backend.jl
@@ -183,13 +183,13 @@ function _moi_add_node_constraint(
cref = ConstraintRef(node, constraint_index, JuMP.shape(con))

for graph in containing_optigraphs(node)
# TODO: i think we need to make copies here for moi_funcs
# moi_func_graph = copy(moi_func)
moi_func_graph = deepcopy(moi_func)

# update func variable indices
_update_moi_func!(graph_backend(graph), moi_func, jump_func)
_update_moi_func!(graph_backend(graph), moi_func_graph, jump_func)

# add to optinode backend
_add_node_constraint_to_backend(graph_backend(graph), cref, moi_func, moi_set)
_add_node_constraint_to_backend(graph_backend(graph), cref, moi_func_graph, moi_set)
end
return cref
end
@@ -219,6 +219,7 @@ end

### MOI Utilities

#TODO: use copies for updating
"""
_update_moi_func!(
backend::GraphMOIBackend,
@@ -280,7 +281,7 @@ function _update_moi_func!(
for i = 1:length(jump_func.args)
jump_arg = jump_func.args[i]
moi_arg = moi_func.args[i]
if typeof(jump_arg) == JuMP.GenericNonlinearExpr
if jump_arg isa JuMP.GenericNonlinearExpr
_update_moi_func!(backend, moi_arg, jump_arg)
elseif typeof(jump_arg) == NodeVariableRef
moi_func.args[i] = backend.node_to_graph_map[jump_arg]
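This `isa` change is the heart of the commit: `JuMP.GenericNonlinearExpr` is a parametric (UnionAll) type, so `typeof(jump_arg)`, which is always a concrete type such as `GenericNonlinearExpr{NodeVariableRef}`, never compares equal to it, and nested nonlinear arguments silently skipped the index remapping. The `typeof` comparison against `NodeVariableRef` on the following line still works because that type is concrete. A standalone illustration in plain JuMP (where the variable type is `VariableRef`):

```julia
using JuMP

model = Model()
@variable(model, x)
expr = sin(x) + x^2   # builds a GenericNonlinearExpr{VariableRef}

typeof(expr) == JuMP.GenericNonlinearExpr  # false: concrete type vs UnionAll
expr isa JuMP.GenericNonlinearExpr         # true: isa respects parametric types
```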
@@ -289,6 +290,9 @@
return
end

### add variables to a backend for the purpose of creating linking constraints
# or objectives across subgraphs

function _add_backend_variables(
backend::GraphMOIBackend,
jump_func::JuMP.GenericAffExpr
@@ -328,7 +332,6 @@ function _add_backend_variables(
push!(vars, jump_arg)
end
end

vars_to_add = setdiff(vars, keys(backend.node_to_graph_map.var_map))
for var in vars_to_add
_add_variable_to_backend(backend, var)
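A side note on the `setdiff` guard above: it filters the referenced variables down to the ones the backend has not yet registered, so variables shared by several linking constraints are added only once. The pattern in isolation, with stand-in symbol keys:

```julia
known = Dict(:x1 => 1, :y1 => 2)           # stand-in for node_to_graph_map.var_map
referenced = [:x1, :x2, :y2]               # variables in a new linking constraint
to_add = setdiff(referenced, keys(known))  # [:x2, :y2] -- only unseen variables get added
```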
@@ -358,11 +361,13 @@ function _moi_add_edge_constraint(
# add backend variables if linking across optigraphs
_add_backend_variables(graph_backend(graph), jump_func)

moi_func_graph = deepcopy(moi_func)

# update the moi function variable indices
_update_moi_func!(graph_backend(graph), moi_func, jump_func)
_update_moi_func!(graph_backend(graph), moi_func_graph, jump_func)

# add the constraint to the backend
_add_edge_constraint_to_backend(graph_backend(graph), cref, moi_func, moi_set)
_add_edge_constraint_to_backend(graph_backend(graph), cref, moi_func_graph, moi_set)
end

return cref
@@ -445,51 +450,4 @@ end
function MOI.optimize!(graph_backend::GraphMOIBackend)
MOI.optimize!(graph_backend.moi_backend)
return nothing
end

### Helpful utilities

# """
# append_to_backend!(dest::MOI.ModelLike, src::MOI.ModelLike)

# Copy the underlying model from `src` into `dest`, but ignore attributes
# such as the objective function and objective sense
# """
# function append_to_backend!(dest::MOI.ModelLike, src::MOI.ModelLike)
# vis_src = MOI.get(src, MOI.ListOfVariableIndices()) #returns vector of MOI.VariableIndex
# index_map = MOIU.IndexMap()

# # per the comment in MOI:
# # "The `NLPBlock` assumes that the order of variables does not change (#849)
# # Therefore, all VariableIndex and VectorOfVariable constraints are added
# separately, and no variables constrained-on-creation are added."
# # Consequently, Plasmo avoids using the constrained-on-creation approach because
# # of the way it constructs the NLPBlock for the optimizer.

# # has_nlp = MOI.NLPBlock() in MOI.get(src, MOI.ListOfModelAttributesSet())
# # constraints_not_added = if has_nlp
# constraints_not_added = Any[
# MOI.get(src, MOI.ListOfConstraintIndices{F,S}()) for
# (F, S) in MOI.get(src, MOI.ListOfConstraintTypesPresent()) if
# MOIU._is_variable_function(F)
# ]
# # else
# # Any[
# # MOIU._try_constrain_variables_on_creation(dest, src, index_map, S)
# # for S in MOIU.sorted_variable_sets_by_cost(dest, src)
# # ]
# # end

# # Copy free variables into graph optimizer
# MOI.Utilities._copy_free_variables(dest, index_map, vis_src)

# # Copy variable attributes (e.g. name, and VariablePrimalStart())
# MOI.Utilities.pass_attributes(dest, src, index_map, vis_src)

# # Normally this copies ObjectiveSense() and ObjectiveFunction(), but we don't want to do that here
# #MOI.Utilities.pass_attributes(dest, src, idxmap)

# MOI.Utilities._pass_constraints(dest, src, index_map, constraints_not_added)

# return index_map #return an idxmap for each source model
# end
end
12 changes: 4 additions & 8 deletions src/jump_interop.jl
@@ -63,7 +63,6 @@ function _moi_add_constraint(
end

### Affine Expressions

# adapted from: https://github.com/jump-dev/JuMP.jl/blob/master/src/aff_expr.jl

function _assert_isfinite(a::JuMP.GenericAffExpr)
@@ -128,7 +127,6 @@ function JuMP.GenericAffExpr{C,NodeVariableRef}(
edge::OptiEdge,
f::MOI.ScalarAffineFunction,
) where {C}
println(f)
aff = GenericAffExpr{C,NodeVariableRef}(f.constant)
# build JuMP Affine Expression over edge variables
for t in f.terms
@@ -173,7 +171,6 @@
end

### Quadratic Expressions

# adapted from: https://github.com/jump-dev/JuMP.jl/blob/master/src/quad_expr.jl

function _moi_quadratic_term(t::Tuple)
@@ -224,8 +221,6 @@ function JuMP.GenericQuadExpr{C,NodeVariableRef}(
for t in f.quadratic_terms
v1 = graph_backend(node).graph_to_node_map[t.variable_1].index
v2 = graph_backend(node).graph_to_node_map[t.variable_2].index
# v1 = t.variable_1
# v2 = t.variable_2
coef = t.coefficient
if v1 == v2
coef /= 2
@@ -345,7 +340,6 @@
end

### Nonlinear Expressions

# adapted from: https://github.com/jump-dev/JuMP.jl/blob/master/src/nlp_expr.jl

# OptiNode
@@ -354,7 +348,9 @@ JuMP.variable_ref_type(::Type{OptiNode{OptiGraph}}) = NodeVariableRef
JuMP.jump_function(::OptiNode, x::Number) = convert(Float64, x)

function JuMP.jump_function(node::OptiNode, vidx::MOI.VariableIndex)
return NodeVariableRef(node, vidx)
gb = graph_backend(node)
node_var = gb.graph_to_node_map[vidx]
return NodeVariableRef(node, node_var.index)
end

function JuMP.jump_function(node::OptiNode, f::MOI.ScalarNonlinearFunction)
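This change is the other half of the index fix: the `vidx` handed to `jump_function` is a graph-level `MOI.VariableIndex`, so building a `NodeVariableRef` from it directly pointed at whatever node variable happened to share that number. Translating through `graph_to_node_map` first recovers the node-local index. A sketch with stand-in integer indices (hypothetical, not Plasmo's types):

```julia
# graph backends number variables globally; nodes number them locally
graph_to_node = Dict(3 => 1)   # graph variable 3 is some node's local variable 1

node_index_old(vidx) = vidx                  # old behavior: assume indices coincide
node_index_new(vidx) = graph_to_node[vidx]   # new behavior: translate first

node_index_old(3)  # 3 -- can reference the wrong node variable
node_index_new(3)  # 1 -- the intended node-local index
```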
@@ -445,4 +441,4 @@ function JuMP.jump_function(graph::OptiGraph, f::MOI.ScalarNonlinearFunction)
end
end
return ret
end
end