Commit

Merge pull request #1349 from JuliaOpt/bl/fix0.7
Fix various Julia v0.7 warnings
blegat authored Jun 18, 2018
2 parents f7f9f54 + 4e6e653 commit 75258ea
Showing 11 changed files with 35 additions and 33 deletions.
32 changes: 17 additions & 15 deletions src/Derivatives/coloring.jl
@@ -1,5 +1,7 @@
module Coloring

+using Compat
+
import DataStructures

include("topological_sort.jl")
@@ -82,16 +84,16 @@ function gen_adjlist(I,J,nel)
adjcount[i] += 1
adjcount[j] += 1
end
-offsets = Array{Int}(nel+1)
+offsets = Array{Int}(undef, nel+1)
offsets[1] = 1
for k in 1:nel
offsets[k+1] = offsets[k] + adjcount[k]
end
fill!(adjcount,0)

-edges = Array{MyPair{Int}}(n_edges)
-adjlist = Array{Int}(offsets[nel+1]-1)
-edgeindex = Array{Int}(length(adjlist))
+edges = Array{MyPair{Int}}(undef, n_edges)
+adjlist = Array{Int}(undef, offsets[nel+1]-1)
+edgeindex = Array{Int}(undef, length(adjlist))
edge_count = 0

for k in 1:length(I)
@@ -176,7 +178,7 @@ function acyclic_coloring(g::UndirectedGraph)
end
num_colors = 0
forbiddenColors = Int[]
-firstNeighbor = Array{Edge}(0)
+firstNeighbor = Edge[]
firstVisitToTree = fill(Edge(0,0,0),num_edges(g))
color = fill(0, num_vertices(g))
# disjoint set forest of edges in the graph
@@ -270,7 +272,7 @@ function recovery_preprocess(g::UndirectedGraph,color,num_colors, local_indices)
twocolorindex = zeros(Int32,num_colors, num_colors)
seen_twocolors = 0
# count of edges in each subgraph
-edge_count = Array{Int}(0)
+edge_count = Int[]
for k in 1:length(g.edges)
e = g.edges[k]
u = e.first
@@ -286,9 +288,9 @@ function recovery_preprocess(g::UndirectedGraph,color,num_colors, local_indices)
edge_count[idx] += 1
end
# edges sorted by twocolor subgraph
-sorted_edges = Array{Vector{MyPair{Int}}}(seen_twocolors)
+sorted_edges = Array{Vector{MyPair{Int}}}(undef, seen_twocolors)
for idx in 1:seen_twocolors
-sorted_edges[idx] = Array{MyPair{Int}}(0)
+sorted_edges[idx] = MyPair{Int}[]
sizehint!(sorted_edges[idx],edge_count[idx])
end

@@ -303,10 +305,10 @@ function recovery_preprocess(g::UndirectedGraph,color,num_colors, local_indices)
end

# list of unique vertices in each twocolor subgraph
-vertexmap = Array{Vector{Int}}(seen_twocolors)
+vertexmap = Array{Vector{Int}}(undef, seen_twocolors)

-postorder = Array{Vector{Int}}(seen_twocolors)
-parents = Array{Vector{Int}}(seen_twocolors)
+postorder = Array{Vector{Int}}(undef, seen_twocolors)
+parents = Array{Vector{Int}}(undef, seen_twocolors)

# temporary lookup map from global index to subgraph index
revmap = zeros(Int,num_vertices(g))
@@ -342,7 +344,7 @@ function recovery_preprocess(g::UndirectedGraph,color,num_colors, local_indices)
end

# set up offsets for adjlist
-offset = Array{Int}(length(vlist)+1)
+offset = Array{Int}(undef, length(vlist)+1)
offset[1] = 1
for k in 1:length(vlist)
offset[k+1] = offset[k] + adjcount[vlist[k]]
@@ -351,7 +353,7 @@ function recovery_preprocess(g::UndirectedGraph,color,num_colors, local_indices)
# adjlist for node u in twocolor idx starts at
# vec[offset[u]]
# u has global index vlist[u]
-vec = Array{Int}(offset[length(vlist)+1]-1)
+vec = Array{Int}(undef, offset[length(vlist)+1]-1)

# now fill in
for k in 1:length(my_edges)
@@ -471,7 +473,7 @@ end
export hessian_color_preprocess

# allocate a seed matrix
-seed_matrix(rinfo::RecoveryInfo) = Array{Float64}(length(rinfo.local_indices),rinfo.num_colors)
+seed_matrix(rinfo::RecoveryInfo) = Array{Float64}(undef,length(rinfo.local_indices),rinfo.num_colors)

export seed_matrix

@@ -512,7 +514,7 @@ function recover_from_matmat!(V, R, rinfo::RecoveryInfo, stored_values)
vmap = rinfo.vertexmap[t]
order = rinfo.postorder[t]
parent = rinfo.parents[t]
-stored_values[1:length(order)] = 0.0
+stored_values[1:length(order)] .= 0.0

@inbounds for z in 1:length(order)
v = order[z]
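A minimal sketch of the v0.7 idioms this file adopts (explicit undef construction, empty typed-vector literals, and broadcast assignment into a slice); the variable names below are illustrative, not taken from coloring.jl:

    using Compat                     # also makes most of this syntax available on v0.6
    offsets = Array{Int}(undef, 4)   # uninitialized array; Array{Int}(4) warns on v0.7
    buffer = Int[]                   # empty typed vector, replaces Array{Int}(0)
    sizehint!(buffer, 16)            # reserve capacity without changing the length
    stored = zeros(5)
    stored[1:3] .= 0.0               # broadcast assignment; plain = into a slice is deprecated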
4 changes: 2 additions & 2 deletions src/Derivatives/conversion.jl
@@ -66,8 +66,8 @@ export expr_to_nodedata
# edges leaving any node (i.e., the children)
function adjmat(nd::Vector{NodeData})
len = length(nd)
-I = Array{Int}(len)
-J = Array{Int}(len)
+I = Vector{Int}(undef, len)
+J = Vector{Int}(undef, len)
realnz = 0
for nz in 1:len
par = nd[nz].parent
2 changes: 1 addition & 1 deletion src/Derivatives/linearity.jl
@@ -8,7 +8,7 @@ export CONSTANT, LINEAR, PIECEWISE_LINEAR, NONLINEAR

function classify_linearity(nd::Vector{NodeData},adj,subexpression_linearity)

-linearity = Array{Linearity}(length(nd))
+linearity = Array{Linearity}(undef, length(nd))

# do a forward pass through the graph, which is reverse order of nd

2 changes: 1 addition & 1 deletion src/Derivatives/subexpressions.jl
@@ -55,7 +55,7 @@ function order_subexpressions(main_expressions::Vector{Vector{NodeData}},subexpr
end
N = nsub+length(main_expressions)
sp = sparse(I,J,ones(length(I)),N,N)
-cmap = Array{Int}(N)
+cmap = Vector{Int}(undef, N)
order = reverse(Coloring.reverse_topological_sort_by_dfs(sp.rowval,sp.colptr,N,cmap)[1])
# remove the subexpressions which never appear anywhere
# and the indices of the main expressions
2 changes: 1 addition & 1 deletion src/Derivatives/topological_sort.jl
@@ -31,7 +31,7 @@ mutable struct TopologicalSortVisitor
parents::Vector{Int}

function TopologicalSortVisitor(n::Int)
-vs = Array{Int}(0)
+vs = Int[]
sizehint!(vs, n)
new(vs, zeros(Int,n))
end
2 changes: 1 addition & 1 deletion src/Derivatives/types.jl
@@ -70,7 +70,7 @@ mutable struct UserOperatorRegistry
univariate_operator_fprimeprime::Vector{Any}
end

-UserOperatorRegistry() = UserOperatorRegistry(Dict{Symbol,Int}(),Vector{MOI.AbstractNLPEvaluator}(0),Dict{Symbol,Int}(),[],[],[])
+UserOperatorRegistry() = UserOperatorRegistry(Dict{Symbol,Int}(),MOI.AbstractNLPEvaluator[],Dict{Symbol,Int}(),[],[],[])

# we use the MathOptInterface NLPEvaluator interface, where the
# operator takes the place of the objective function.
4 changes: 2 additions & 2 deletions src/JuMPArray.jl
@@ -108,7 +108,7 @@ Base.isassigned(A::JuMPArray{T,N}, idx...) where {T,N} = length(idx) == N && all
# For ambiguity
Base.isassigned(A::JuMPArray{T,N}, idx::Int...) where {T,N} = length(idx) == N && all(t -> haskey(A.lookup[t[1]], t[2]), enumerate(idx))

-Base.eachindex(A::JuMPArray) = CartesianRange(size(A.data))
+Base.eachindex(A::JuMPArray) = CartesianIndices(size(A.data))

# TODO: similar

@@ -160,7 +160,7 @@ function Base.show_nd(io::IO, a::JuMPArray, print_matrix::Function, label_slices
end
tailinds = Base.tail(Base.tail(indices(a.data)))
nd = ndims(a)-2
-for I in CartesianRange(tailinds)
+for I in CartesianIndices(tailinds)
idxs = I.I
if limit
for i = 1:nd
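An illustrative sketch, not code from JuMPArray.jl, of the rename applied above: v0.6's CartesianRange became CartesianIndices in v0.7 and accepts the same size tuple:

    A = rand(2, 3)                        # hypothetical array for illustration
    for I in CartesianIndices(size(A))    # replaces CartesianRange(size(A))
        A[I] = sum(Tuple(I))              # I is a CartesianIndex; Tuple(I) gives its indices
    end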
6 changes: 3 additions & 3 deletions src/macros.jl
@@ -21,13 +21,13 @@ function buildrefsets(expr::Expr, cname)
# Creating an indexed set of refs
refcall = Expr(:ref, cname)
if isexpr(c, :typed_vcat) || isexpr(c, :ref)
-shift!(c.args)
+popfirst!(c.args)
end
condition = :()
if isexpr(c, :vcat) || isexpr(c, :typed_vcat)
if isexpr(c.args[1], :parameters)
@assert length(c.args[1].args) == 1
-condition = shift!(c.args).args[1]
+condition = popfirst!(c.args).args[1]
else
condition = pop!(c.args)
end
@@ -1046,7 +1046,7 @@ macro variable(args...)
x = gensym()
anon_singleton = true
else
-x = shift!(extra)
+x = popfirst!(extra)
if x in [:Int,:Bin,:PSD]
_error("Ambiguous variable name $x detected. Use the \"category\" keyword argument to specify a category for an anonymous variable.")
end
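The shift! to popfirst! rename used throughout this file, sketched on a plain vector (hypothetical values, not taken from the macro code):

    extra = [:x, :Int, :Bin]
    first_item = popfirst!(extra)   # v0.7 name for shift!: removes and returns extra[1]
    last_item = pop!(extra)         # pop! is unchanged: removes and returns the last element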
2 changes: 1 addition & 1 deletion src/variables.jl
@@ -112,7 +112,7 @@ MOI.isvalid(m::Model, v::VariableRef) = (v.m === m) && MOI.isvalid(m.moibackend,
# The default hash is slow. It's important for the performance of AffExpr to
# define our own.
# https://github.com/JuliaOpt/MathOptInterface.jl/issues/234#issuecomment-366868878
-Base.hash(v::VariableRef, h::UInt) = hash(object_id(v.m), hash(v.index.value, h))
+Base.hash(v::VariableRef, h::UInt) = hash(objectid(v.m), hash(v.index.value, h))
Base.isequal(v1::VariableRef, v2::VariableRef) = v1.m === v2.m && v1.index == v2.index


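A small illustration of the object_id to objectid rename (assumed usage, not part of the commit):

    x = [1, 2, 3]                      # hypothetical object for illustration
    objectid(x)                        # v0.7 spelling of object_id; identity-based UInt
    objectid(x) == objectid(copy(x))   # false: equal contents but distinct objects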
10 changes: 5 additions & 5 deletions test/containers.jl
@@ -25,7 +25,7 @@ function containermatches(c1::AbstractArray,c2::AbstractArray)
end

function containermatches(c1::JuMPArray,c2::JuMPArray)
-return typeof(c1) == typeof(c2) && indices(c1) == indices(c2)
+return typeof(c1) == typeof(c2) && Compat.axes(c1) == Compat.axes(c2)
end

containermatches(c1::Dict, c2::Dict) = (eltype(c1) == eltype(c2))
@@ -79,7 +79,7 @@ containermatches(c1, c2) = false
@test_throws ErrorException @dummycontainer([i=1:10; iseven(i)], JuMPArray)
@test containermatches(@dummycontainer([i=1:10; iseven(i)], Dict), Dict{Any,Bool}())

-# Dependent indices
+# Dependent axes
@test containermatches(@dummycontainer([i=1:10, j=1:i], Auto), Dict{Any,Bool}())
@test_throws ErrorException @dummycontainer([i=1:10, j=1:i], Array)
@test_throws ErrorException @dummycontainer([i=1:10, j=1:i], JuMPArray)
@@ -99,7 +99,7 @@ end
@test A[3,1,1,1,1] == 2.0
@test isassigned(A, 2)
@test !isassigned(A, 1)
-@test length.(indices(A)) == (2,)
+@test length.(Compat.axes(A)) == (2,)
B = plus1.(A)
@test B[2] == 2.0
@test B[3] == 3.0
@@ -113,7 +113,7 @@ And data, a 2-element Array{Float64,1}:
A = @inferred JuMPArray([1.0,2.0], s2)
@test @inferred A[:a] == 1.0
@test A[:b] == 2.0
-@test length.(indices(A)) == (2,)
+@test length.(Compat.axes(A)) == (2,)
B = plus1.(A)
@test B[:a] == 2.0
@test B[:b] == 3.0
@@ -125,7 +125,7 @@
3.0"""

A = @inferred JuMPArray([1 2; 3 4], s1, s2)
-@test length.(indices(A)) == (2,2)
+@test length.(Compat.axes(A)) == (2,2)
@test @inferred A[2,:a] == 1
@test A[3,:a] == 3
@test A[2,:b] == 2
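A sketch of the indices-to-axes rename that these tests now route through Compat so they pass on both v0.6 and v0.7 (hypothetical array, not from the test suite):

    using Compat
    A = rand(2, 3)
    Compat.axes(A)            # (Base.OneTo(2), Base.OneTo(3)); spelled indices(A) on v0.6
    length.(Compat.axes(A))   # (2, 3), the pattern checked in the tests above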
2 changes: 1 addition & 1 deletion test/derivatives.jl
@@ -201,7 +201,7 @@ function test_linearity(ex,testval,IJ = [],indices=[])
linearity = classify_linearity(nd,adj,[])
@test linearity[1] == testval
idxset = Coloring.IndexedSet(100)
-edgelist = compute_hessian_sparsity(nd,adj,linearity,idxset,Array{Set{Tuple{Int,Int}}}(0), Array{Vector{Int}}(0))
+edgelist = compute_hessian_sparsity(nd,adj,linearity,idxset,Array{Set{Tuple{Int,Int}}}(undef, 0), Array{Vector{Int}}(undef, 0))
if linearity[1] != NONLINEAR
@test length(edgelist) == 0
elseif length(IJ) > 0
