Revamp for Julia v0.6 #66

Merged: 9 commits, Nov 23, 2017
Changes from 3 commits
3 changes: 1 addition & 2 deletions .travis.yml
@@ -3,8 +3,7 @@ os:
- linux
- osx
julia:
- 0.5
- nightly
- 0.6
notifications:
email: false
sudo: false
4 changes: 2 additions & 2 deletions REQUIRE
@@ -1,2 +1,2 @@
julia 0.5
MathProgBase 0.5 0.7
julia 0.6
MathProgBase 0.5 0.8
6 changes: 2 additions & 4 deletions appveyor.yml
@@ -1,9 +1,7 @@
environment:
matrix:
- JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x86/0.5/julia-0.5-latest-win32.exe"
- JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5-latest-win64.exe"
- JULIA_URL: "https://julialangnightlies-s3.julialang.org/bin/winnt/x86/julia-latest-win32.exe"
- JULIA_URL: "https://julialangnightlies-s3.julialang.org/bin/winnt/x64/julia-latest-win64.exe"
- JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x86/0.6/julia-0.6-latest-win32.exe"
- JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.6/julia-0.6-latest-win64.exe"

branches:
only:
24 changes: 12 additions & 12 deletions examples/jump_const_obj.jl
@@ -1,18 +1,18 @@
using JuMP, FactCheck, AmplNLWriter
using JuMP, Base.Test, AmplNLWriter

# Example with no objective (#50)

if !isdefined(:solver); solver = BonminNLSolver(); end
# solver = AmplNLSolver(Ipopt.amplexe, ["print_level=0"])

m = Model(solver=solver)
@variable(m, 0 <= yp <= 1, Int)
@variable(m, 0 <= l <= 1000.0)
@variable(m, 0 <= f <= 1000.0)
@NLconstraint(m, .087 * l >= f ^ 2)
@constraint(m, l <= yp * 1000.0)
@objective(m, Min, 5)
@testset "example: jump_no_obj" begin
m = Model(solver=solver)
@variable(m, 0 <= yp <= 1, Int)
@variable(m, 0 <= l <= 1000.0)
@variable(m, 0 <= f <= 1000.0)
@NLconstraint(m, .087 * l >= f ^ 2)
@constraint(m, l <= yp * 1000.0)
@objective(m, Min, 5)

context("example: jump_no_obj") do
@fact solve(m) --> :Optimal
@fact getobjectivevalue(m) --> 5
@test solve(m) == :Optimal
@test getobjectivevalue(m) == 5
end
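This FactCheck to Base.Test port is the pattern repeated across every example file in the PR: `context(...) do ... end` becomes `@testset ... begin ... end`, `@fact x --> y` becomes `@test x == y`, and `roughly(y, tol)` becomes `isapprox(x, y, atol=tol)`. A minimal, self-contained sketch of the mapping on Julia 0.6 (the `status` and `objective` values below are placeholders, not taken from the diff):

using Base.Test   # standard library on Julia 0.6; FactCheck is no longer needed

# Placeholder values standing in for solver output, purely to exercise the macros.
status    = :Optimal
objective = 5.004

@testset "migration sketch" begin
    @test status == :Optimal                     # was: @fact status --> :Optimal
    @test isapprox(objective, 5.0, atol=1e-2)    # was: @fact objective --> roughly(5.0, 1e-2)
end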
21 changes: 11 additions & 10 deletions examples/jump_maxmin.jl
@@ -1,6 +1,7 @@
using JuMP, FactCheck, AmplNLWriter
using JuMP, Base.Test, AmplNLWriter

# solver = AmplNLSolver(Ipopt.amplexe, ["print_level=0"])

if !isdefined(:solver); solver = IpoptNLSolver(); end
# Note min and max not implemented in Couenne

## Solve test problem with simple min functions
@@ -11,13 +12,13 @@ if !isdefined(:solver); solver = IpoptNLSolver(); end
# The optimal objective value is 0.25.
# x = 0.5
##
context("example: maxmin") do
@testset "example: maxmin" begin
m = Model(solver=solver)
@variable(m, -0.5 <= x <= 0.5, start = 0.25)
@NLobjective(m, Max, min(x^2, 0.3, x))
@fact solve(m) --> :Optimal
@fact getobjectivevalue(m) --> roughly(0.25, 1e-2)
@fact getvalue(x) --> roughly(0.5, 1e-2)
@test solve(m) == :Optimal
@test isapprox(getobjectivevalue(m), 0.25, atol=1e-2)
@test isapprox(getvalue(x), 0.5, atol=1e-2)
end

## Solve test problem with simple max functions
@@ -28,11 +29,11 @@ end
# The optimal objective value is 0.
# x = 0.
##
context("example: minmax") do
@testset "example: minmax" begin
m = Model(solver=solver)
@variable(m, -1 <= x <= 1, start=-1)
@NLobjective(m, Min, max(x^2, x, -1))
@fact solve(m) --> :Optimal
@fact getobjectivevalue(m) --> roughly(0, 1e-2)
@fact getvalue(x) --> roughly(0, 1e-2)
@test solve(m) == :Optimal
@test isapprox(getobjectivevalue(m), 0, atol=1e-2)
@test isapprox(getvalue(x), 0, atol=1e-2)
end
45 changes: 27 additions & 18 deletions examples/jump_minlp.jl
@@ -1,4 +1,4 @@
using JuMP, FactCheck, AmplNLWriter
using JuMP, Base.Test, AmplNLWriter

## Solve test problem 1 (Synthesis of processing system) in
# M. Duran & I.E. Grossmann, "An outer approximation algorithm for
@@ -24,24 +24,33 @@ using JuMP, FactCheck, AmplNLWriter
# The solution is (1.30098, 0, 1, 0, 1, 0).
##

if !isdefined(:solver); solver = BonminNLSolver(); end
# solver = AmplNLSolver(Ipopt.amplexe, ["print_level=0"])

m = Model(solver=solver)
x_U = [2,2,1]
@variable(m, x_U[i] >= x[i=1:3] >= 0)
@variable(m, y[4:6], Bin)

@NLobjective(m, Min, 10 + 10*x[1] - 7*x[3] + 5*y[4] + 6*y[5] + 8*y[6] - 18*log(x[2]+1) - 19.2*log(x[1]-x[2]+1))
@NLconstraint(m, 0.8*log(x[2] + 1) + 0.96*log(x[1] - x[2] + 1) - 0.8*x[3] >= 0)
@NLconstraint(m, log(x[2] + 1) + 1.2*log(x[1] - x[2] + 1) - x[3] - 2*y[6] >= -2)
@NLconstraint(m, x[2] - x[1] <= 0)
@NLconstraint(m, x[2] - 2*y[4] <= 0)
@NLconstraint(m, x[1] - x[2] - 2*y[5] <= 0)
@NLconstraint(m, y[4] + 2*y[5]*0.5 <= 1)
@testset "example: jump_minlp" begin
m = Model(solver=solver)
x_U = [2,2,1]
@variable(m, x_U[i] >= x[i=1:3] >= 0)
@variable(m, y[i=4:6], Bin)

context("example: jump_minlp") do
@fact solve(m) --> :Optimal
@fact getvalue(x)[:] --> roughly([1.30098, 0.0, 1.0], 1e-5)
@fact getvalue(y)[:] --> roughly([0.0, 1.0, 0.0], 1e-5)
@fact getobjectivevalue(m) --> roughly(6.00975, 1e-5)
@NLobjective(m, Min, 10 + 10*x[1] - 7*x[3] + 5*y[4] + 6*y[5] + 8*y[6] - 18*log(x[2]+1) - 19.2*log(x[1]-x[2]+1))
@NLconstraint(m, 0.8*log(x[2] + 1) + 0.96*log(x[1] - x[2] + 1) - 0.8*x[3] >= 0)
@NLconstraint(m, log(x[2] + 1) + 1.2*log(x[1] - x[2] + 1) - x[3] - 2*y[6] >= -2)
@NLconstraint(m, x[2] - x[1] <= 0)
@NLconstraint(m, x[2] - 2*y[4] <= 0)
@NLconstraint(m, x[1] - x[2] - 2*y[5] <= 0)
@NLconstraint(m, y[4] + 2*y[5]*0.5 <= 1)

@test solve(m) == :Optimal

if getsolvername(solver) == "ipopt"
# Ipopt solves the relaxation
@test isapprox(getvalue(x)[:], [1.14652, 0.546596, 1.0], atol=1e-5)
@test isapprox(getvalue(y)[:], [0.27330, 0.299959, 0.0], atol=1e-5)
@test isapprox(getobjectivevalue(m), 0.75928, atol=1e-5)
else
@test isapprox(getvalue(x)[:], [1.30098, 0.0, 1.0], atol=1e-5)
@test isapprox(getvalue(y)[:], [0.0, 1.0, 0.0], atol=1e-5)
@test isapprox(getobjectivevalue(m), 6.00975, atol=1e-5)
end
end
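Two things change in this example besides the test framework. First, the `if !isdefined(:solver); solver = BonminNLSolver(); end` fallback is gone, so the file now expects whoever includes it (normally the test runner) to define `solver` first. Second, the new `getsolvername(solver) == "ipopt"` branch checks the continuous-relaxation values when a pure NLP solver is used instead of a MINLP solver. A hedged sketch of running the example standalone, assuming the Ipopt AMPL executable from the Ipopt package is installed (the `Pkg.dir` path is an assumption about a standard Julia 0.6 package layout):

using AmplNLWriter, Ipopt

# Define the solver the example file expects to find, then include it.
solver = AmplNLSolver(Ipopt.amplexe, ["print_level=0"])
include(joinpath(Pkg.dir("AmplNLWriter"), "examples", "jump_minlp.jl"))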
48 changes: 24 additions & 24 deletions examples/jump_nlexpr.jl
@@ -1,32 +1,32 @@
using JuMP, FactCheck, AmplNLWriter
using JuMP, Base.Test, AmplNLWriter

# Example testing basic use of NLExpr with AmplNLWriter.jl

if !isdefined(:solver); solver = IpoptNLSolver(); end
# solver = AmplNLSolver(Ipopt.amplexe, ["print_level=0"])

m = Model(solver=solver)

n = 30
l = -ones(n); l[1] = 0
u = ones(n)
@variable(m, l[i] <= x[i=1:n] <= u[i])
@NLexpression(m, f1, x[1])
@NLexpression(m, g, 1 + 9 * sum{x[j] ^ 2, j = 2:n} / (n - 1))
@NLexpression(m, h, 1 - (f1 / g) ^ 2)
@NLexpression(m, f2, g * h)
@testset "example: jump_nlexpr" begin
m = Model(solver=solver)

setvalue(x[1], 1)
setvalue(x[2:n], zeros(n - 1))
@NLobjective(m, :Min, f2)
n = 30
l = -ones(n); l[1] = 0
u = ones(n)
@variable(m, l[i] <= x[i=1:n] <= u[i])
@NLexpression(m, f1, x[1])
@NLexpression(m, g, 1 + 9 * sum(x[j] ^ 2 for j = 2:n) / (n - 1))
@NLexpression(m, h, 1 - (f1 / g) ^ 2)
@NLexpression(m, f2, g * h)

context("example: jump_nlexpr") do
@fact solve(m) --> :Optimal
@fact getvalue(x[1]) --> roughly(1.0, 1e-5)
@fact getvalue(x[2:end]) --> roughly(zeros(n - 1), 1e-5)
@fact getvalue(f1) --> roughly(1.0, 1e-5)
@fact getvalue(f2) --> roughly(0.0, 1e-5)
@fact getvalue(g) --> roughly(1.0, 1e-5)
@fact getvalue(h) --> roughly(0.0, 1e-5)
@fact getobjectivevalue(m) --> roughly(0.0, 1e-5)
end
setvalue(x[1], 1)
setvalue(x[2:n], zeros(n - 1))
@NLobjective(m, :Min, f2)

@test solve(m) == :Optimal
@test isapprox(getvalue(x[1]), 1.0, atol=1e-5)
@test isapprox(getvalue(x[2:end]), zeros(n - 1), atol=1e-5)
@test isapprox(getvalue(f1), 1.0, atol=1e-5)
@test isapprox(getvalue(f2), 0.0, atol=1e-5)
@test isapprox(getvalue(g), 1.0, atol=1e-5)
@test isapprox(getvalue(h), 0.0, atol=1e-5)
@test isapprox(getobjectivevalue(m), 0.0, atol=1e-5)
end
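Beyond the test-framework port, this file picks up the Julia 0.6 / newer-JuMP syntax for sums inside nonlinear expressions: the curly-brace form `sum{x[j] ^ 2, j = 2:n}` is replaced by the generator expression `sum(x[j] ^ 2 for j = 2:n)`. The generator form is plain Julia and works outside JuMP too; a tiny self-contained illustration (values are arbitrary):

n = 30
total = sum(j^2 for j in 2:n)   # ordinary generator expression; evaluates to 9454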
27 changes: 12 additions & 15 deletions examples/jump_nltrig.jl
@@ -1,4 +1,4 @@
using JuMP, FactCheck, AmplNLWriter
using JuMP, Base.Test, AmplNLWriter

## Solve test problem with sind and cosd functions
#
@@ -8,26 +8,23 @@ using JuMP, FactCheck, AmplNLWriter
# The optimal objective value is 0
##

# Allow resolving the model from multiple starts after NLP changes in JuMP 0.12
EnableNLPResolve()
# solver = AmplNLSolver(Ipopt.amplexe, ["print_level=0"])

if !isdefined(:solver); solver = CouenneNLSolver(); end
@testset "example: jump_nltrig" begin
m = Model(solver=solver)
@variable(m, x[1:2])

m = Model(solver=solver)
@variable(m, x[1:2])
@NLobjective(m, Min, (7 - (3*cosd(x[1]) + 5*cosd(x[2])))^2 + (0 - (3*sind(x[1]) + 5*sind(x[2])))^2)

@NLobjective(m, Min, (7 - (3*cosd(x[1]) + 5*cosd(x[2])))^2 + (0 - (3*sind(x[1]) + 5*sind(x[2])))^2)

context("example: jump_nltrig") do
setvalue(x[1], 30)
setvalue(x[2], -50)
@fact solve(m) --> :Optimal
@fact getvalue(x)[:] --> roughly([38.21321, -21.78678], 1e-5)
@fact getobjectivevalue(m) --> roughly(0.0, 1e-5)
@test solve(m) == :Optimal
@test isapprox(getvalue(x)[:], [38.21321, -21.78678], atol=1e-5)
@test isapprox(getobjectivevalue(m), 0.0, atol=1e-5)
# Now try from the other side
setvalue(x[1], -30)
setvalue(x[2], 50)
@fact solve(m) --> :Optimal
@fact getvalue(x)[:] --> roughly([-38.21321, 21.78678], 1e-5)
@fact getobjectivevalue(m) --> roughly(0.0, 1e-5)
@test solve(m) == :Optimal
@test isapprox(getvalue(x)[:], [-38.21321, 21.78678], atol=1e-5)
@test isapprox(getobjectivevalue(m), 0.0, atol=1e-5)
end
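The deleted `EnableNLPResolve()` call was a workaround for resolving a nonlinear model from multiple start points after the NLP changes in JuMP 0.12; with the JuMP version targeted by this PR it is no longer needed, and the example simply sets new start values and calls `solve` again. A hedged sketch of that resolve pattern, assuming an Ipopt AMPL executable is available through the Ipopt package:

using JuMP, AmplNLWriter, Ipopt

m = Model(solver=AmplNLSolver(Ipopt.amplexe, ["print_level=0"]))
@variable(m, x, start = 1.0)
@NLobjective(m, Min, (x - 2)^2)
solve(m)            # first solve from x = 1.0

setvalue(x, -5.0)   # move the start point...
solve(m)            # ...and resolve directly, no EnableNLPResolve() required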
22 changes: 11 additions & 11 deletions examples/jump_no_obj.jl
@@ -1,17 +1,17 @@
using JuMP, FactCheck, AmplNLWriter
using JuMP, Base.Test, AmplNLWriter

# Example with no objective (#50)

if !isdefined(:solver); solver = BonminNLSolver(); end
# solver = AmplNLSolver(Ipopt.amplexe, ["print_level=0"])

m = Model(solver=solver)
@variable(m, 0 <= yp <= 1, Int)
@variable(m, 0 <= l <= 1000.0)
@variable(m, 0 <= f <= 1000.0)
@NLconstraint(m, .087 * l >= f ^ 2)
@constraint(m, l <= yp * 1000.0)
@testset "example: jump_no_obj" begin
m = Model(solver=solver)
@variable(m, 0 <= yp <= 1, Int)
@variable(m, 0 <= l <= 1000.0)
@variable(m, 0 <= f <= 1000.0)
@NLconstraint(m, .087 * l >= f ^ 2)
@constraint(m, l <= yp * 1000.0)

context("example: jump_no_obj") do
@fact solve(m) --> :Optimal
@fact getobjectivevalue(m) --> 0
@test solve(m) == :Optimal
@test getobjectivevalue(m) == 0
end
33 changes: 20 additions & 13 deletions examples/jump_nonlinearbinary.jl
@@ -1,4 +1,4 @@
using JuMP, FactCheck, AmplNLWriter
using JuMP, Base.Test, AmplNLWriter

## Solve test problem with non-linear binary variables
#
@@ -8,20 +8,27 @@ using JuMP, FactCheck, AmplNLWriter
# The solution is (0, 0).
##

if !isdefined(:solver); solver = BonminNLSolver(); end
# solver = AmplNLSolver(Ipopt.amplexe, ["print_level=0"])

m = Model(solver=solver)
@variable(m, x[1:2], Bin)
@testset "example: jump_nonlinearbinary" begin
m = Model(solver=solver)
@variable(m, x[1:2], Bin)

# Set some non-binary bounds on x1 and x2. These should be ignored.
# The optimal solution if x is Int is (1, 2) which is allowed by these bounds
setupperbound(x[1], 2)
setupperbound(x[2], 2)
# Set some non-binary bounds on x1 and x2. These should be ignored.
# The optimal solution if x is Int is (1, 2) which is allowed by these bounds
setupperbound(x[1], 2)
setupperbound(x[2], 2)

@NLobjective(m, Min, 100*(x[2] - (0.5 + x[1])^2)^2 + (1 - x[1])^2)
@NLobjective(m, Min, 100*(x[2] - (0.5 + x[1])^2)^2 + (1 - x[1])^2)

context("example: jump_nonlinearbinary") do
@fact solve(m) --> :Optimal
@fact getvalue(x)[:] --> [0.0, 0.0]
@fact getobjectivevalue(m) --> 7.25
@test solve(m) == :Optimal

if getsolvername(solver) == "ipopt"
# Ipopt solves the relaxation
@test isapprox(getvalue(x), [0.501245, 1.0], atol=1e-6)
@test isapprox(getobjectivevalue(m), 0.249377, atol=1e-6)
else
@test getvalue(x)[:] == [0.0, 0.0]
@test getobjectivevalue(m) == 7.25
end
end
35 changes: 18 additions & 17 deletions examples/jump_pruning.jl
@@ -1,4 +1,4 @@
using JuMP, FactCheck, AmplNLWriter
using JuMP, Base.Test, AmplNLWriter

## Solve test problem with lots of expressions to prune
#
@@ -16,23 +16,24 @@ using JuMP, FactCheck, AmplNLWriter
# The optimal objective value is 400, solutions can vary.
##

if !isdefined(:solver); solver = IpoptNLSolver(); end
m = Model(solver=solver)
# solver = AmplNLSolver(Ipopt.amplexe, ["print_level=0"])

@variable(m, x[1:2] >= 0)
@testset "example: jump_pruning" begin
m = Model(solver=solver)

@NLobjective(m, Max, x[1]^2 * x[2]^2)
@NLconstraint(m, x[1] * x[2] <= 20)
@NLconstraint(m, x[1] + x[2] <= 40)
@NLconstraint(m, x[1] * x[2] + x[1] + x[2] <= 60)
@NLconstraint(m, x[1] + x[1] * x[2] + x[2] <= 60)
@NLconstraint(m, x[1] * x[2] + x[1] + x[2] <= 60)
@NLconstraint(m, x[1] * x[2] - x[1] - x[2] <= 0)
@NLconstraint(m, x[2] - x[1] * x[2] + x[1] <= 60)
@NLconstraint(m, x[2] - x[1] + x[1] * x[2] <= 0)
@NLconstraint(m, 0 <= 1.0)
@variable(m, x[1:2] >= 0)

context("example: jump_pruning") do
@fact solve(m) --> :Optimal
@fact getobjectivevalue(m) --> roughly(400, 1e-2)
@NLobjective(m, Max, x[1]^2 * x[2]^2)
@NLconstraint(m, x[1] * x[2] <= 20)
@NLconstraint(m, x[1] + x[2] <= 40)
@NLconstraint(m, x[1] * x[2] + x[1] + x[2] <= 60)
@NLconstraint(m, x[1] + x[1] * x[2] + x[2] <= 60)
@NLconstraint(m, x[1] * x[2] + x[1] + x[2] <= 60)
@NLconstraint(m, x[1] * x[2] - x[1] - x[2] <= 0)
@NLconstraint(m, x[2] - x[1] * x[2] + x[1] <= 60)
@NLconstraint(m, x[2] - x[1] + x[1] * x[2] <= 0)
@NLconstraint(m, 0 <= 1.0)

@test solve(m) == :Optimal
@test isapprox(getobjectivevalue(m), 400, atol=1e-2)
end