Commit 153bf96

Added random seeds to all tests

nic-barbara committed Jul 18, 2023
1 parent cacbc5a commit 153bf96
Showing 9 changed files with 55 additions and 37 deletions.
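
The change is the same in every file: create a seeded RNG at the top of the test script, then pass it to the parameter constructors (as a keyword argument) and to each `rand`/`randn` call (as the first positional argument). A minimal sketch of the pattern — `MyParams` is a hypothetical stand-in for constructors like `LipschitzRENParams`:

```julia
using Random

# One explicitly seeded RNG per test file makes every random draw reproducible.
rng = MersenneTwister(42)

# Pass it to rand/randn as the first positional argument...
u0 = randn(rng, 4, 10)

# ...and to parameter constructors as a keyword argument (hypothetical name):
# model_ps = MyParams(nu, ny; rng)
```
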
10 changes: 6 additions & 4 deletions test/ParameterTypes/contraction.jl

@@ -6,6 +6,8 @@ using Test
 
 # include("../test_utils.jl")
 
+rng = MersenneTwister(42)
+
 """
 Test that the contracting REN actually does contract.
@@ -17,15 +19,15 @@ nu, nx, nv, ny = 4, 5, 10, 5
 
 ren_ps = ContractingRENParams{Float64}(
     nu, nx, nv, ny;
-    init=:cholesky, αbar=ᾱ, polar_param=false, output_map=false
+    init=:cholesky, αbar=ᾱ, polar_param=false, output_map=false, rng
 )
 ren = REN(ren_ps)
 
 # Same inputs, different initial conditions
-u0 = randn(nu, batches)
+u0 = randn(rng, nu, batches)
 
-x0 = randn(nx, batches)
-x1 = randn(nx, batches)
+x0 = randn(rng, nx, batches)
+x1 = randn(rng, nx, batches)
 
 # Simulate
 x0n, y0 = ren(x0, u0)

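The test's assertion itself is collapsed in this diff view. A hedged sketch of what a contraction check can look like — the `rollout` helper and the horizon of 50 steps are illustrative assumptions, not the file's actual code:

```julia
using LinearAlgebra, Test

# Hypothetical helper: run the REN forward T steps under a constant input.
function rollout(ren, x, u, T)
    for _ in 1:T
        x, _ = ren(x, u)
    end
    return x
end

# Trajectories from different initial states, driven by the same input,
# should converge for a contracting REN (with rate governed by ᾱ < 1).
xa = rollout(ren, x0, u0, 50)
xb = rollout(ren, x1, u0, 50)
@test norm(xa - xb) < norm(x0 - x1)
```
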
8 changes: 5 additions & 3 deletions test/ParameterTypes/dense_lbdn.jl

@@ -6,6 +6,8 @@ using Test
 
 # include("../test_utils.jl")
 
+rng = MersenneTwister(42)
+
 """
 Test that the model satisfies a specified Lipschitz bound
 """
@@ -14,12 +16,12 @@ nu, ny = 4, 2
 nh = [10, 5, 20, 4]
 γ = 1e-5
 
-lbdn_ps = DenseLBDNParams{Float64}(nu, nh, ny, γ)
+lbdn_ps = DenseLBDNParams{Float64}(nu, nh, ny, γ; rng)
 lbdn = LBDN(lbdn_ps)
 
 # Different inputs with different initial conditions
-u0 = randn(nu, batches)
-u1 = u0 .+ 0.001*rand(nu, batches)
+u0 = randn(rng, nu, batches)
+u1 = u0 .+ 0.001*rand(rng, nu, batches)
 
 # Simulate
 y0 = lbdn(u0)

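The assertion is collapsed in this view. A minimal sketch of how a Lipschitz bound can be checked empirically under that assumption (an observed ratio only ever lower-bounds the true constant, so this is a sanity check, not a proof):

```julia
using LinearAlgebra, Test

# Hypothetical check: a γ-Lipschitz map satisfies ‖f(u1) - f(u0)‖ ≤ γ‖u1 - u0‖.
y0, y1 = lbdn(u0), lbdn(u1)
ratios = [norm(y1[:, i] - y0[:, i]) / norm(u1[:, i] - u0[:, i]) for i in 1:batches]
@test all(ratios .<= γ)
```
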
18 changes: 10 additions & 8 deletions test/ParameterTypes/general_behavioural_constrains.jl

@@ -7,29 +7,31 @@ using Test
 
 # include("../test_utils.jl")
 
+rng = MersenneTwister(42)
+
 """
 Test that the behavioural constraints are satisfied
 """
 batches = 42
 nu, nx, nv, ny = 10, 5, 10, 20
 
 # Generate random matrices
-X = randn(ny,ny)
-Y = randn(nu,nu)
-S = rand(nu,ny)
+X = randn(rng, ny,ny)
+Y = randn(rng, nu,nu)
+S = rand(rng, nu,ny)
 
 Q = -X'*X
 R = S * (Q \ S') + Y'*Y
 
-ren_ps = GeneralRENParams{Float64}(nu, nx, nv, ny, Q, S, R)
+ren_ps = GeneralRENParams{Float64}(nu, nx, nv, ny, Q, S, R; rng)
 ren = REN(ren_ps)
 
 # Different inputs with different initial conditions
-u0 = 10*randn(nu, batches)
-u1 = rand(nu, batches)
+u0 = 10*randn(rng, nu, batches)
+u1 = rand(rng, nu, batches)
 
-x0 = randn(nx, batches)
-x1 = randn(nx, batches)
+x0 = randn(rng, nx, batches)
+x1 = randn(rng, nx, batches)
 
 # Simulate
 x0n, y0 = ren(x0, u0)

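The (Q, S, R) triple above defines the incremental quadratic constraint the REN is built to satisfy; the test's own assertion is collapsed. A hedged sketch of evaluating the supply rate — it assumes `y1` comes from a collapsed `ren(x1, u1)` call, and glosses over initial-condition subtleties in the full incremental definition:

```julia
using LinearAlgebra, Test

# Hypothetical behavioural check: trajectory increments should yield a nonnegative
# supply rate  [Δy; Δu]' * [Q S'; S R] * [Δy; Δu] ≥ 0  for each sample.
M = [Q S'; S R]
Δu = u1 .- u0
Δy = y1 .- y0
vals = [[Δy[:, i]; Δu[:, i]]' * M * [Δy[:, i]; Δu[:, i]] for i in 1:batches]
@test all(vals .>= 0)
```
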
12 changes: 7 additions & 5 deletions test/ParameterTypes/lipschitz_bound.jl

@@ -6,22 +6,24 @@ using Test
 
 # include("../test_utils.jl")
 
+rng = MersenneTwister(42)
+
 """
 Test that the model satisfies a specified Lipschitz bound
 """
 batches = 42
 nu, nx, nv, ny = 4, 5, 10, 2
 γ = 10
 
-ren_ps = LipschitzRENParams{Float64}(nu, nx, nv, ny, γ)
+ren_ps = LipschitzRENParams{Float64}(nu, nx, nv, ny, γ; rng)
 ren = REN(ren_ps)
 
 # Different inputs with different initial conditions
-u0 = 10*randn(nu, batches)
-u1 = rand(nu, batches)
+u0 = 10*randn(rng, nu, batches)
+u1 = rand(rng, nu, batches)
 
-x0 = randn(nx, batches)
-x1 = randn(nx, batches)
+x0 = randn(rng, nx, batches)
+x1 = randn(rng, nx, batches)
 
 # Simulate
 x0n, y0 = ren(x0, u0)

12 changes: 7 additions & 5 deletions test/ParameterTypes/passivity.jl

@@ -7,6 +7,8 @@ using Test
 
 # include("../test_utils.jl")
 
+rng = MersenneTwister(42)
+
 """
 Test passivity inequality
 """
@@ -15,15 +17,15 @@ nu, nx, nv, ny = 6, 5, 10, 6
 T = 100
 
 # Test constructors
-ren_ps = PassiveRENParams{Float64}(nu, nx, nv, ny; init=:random, ν=1.0)
+ren_ps = PassiveRENParams{Float64}(nu, nx, nv, ny; init=:random, ν=1.0, rng)
 ren = REN(ren_ps)
 
 # Different inputs with different initial conditions
-u0 = 10*randn(nu, batches)
-u1 = rand(nu, batches)
+u0 = 10*randn(rng, nu, batches)
+u1 = rand(rng, nu, batches)
 
-x0 = randn(nx, batches)
-x1 = randn(nx, batches)
+x0 = randn(rng, nx, batches)
+x1 = randn(rng, nx, batches)
 
 # Simulate
 x0n, y0 = ren(x0, u0)

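The passivity assertion is collapsed. A minimal sketch of an incremental input-passivity check with index ν, assuming `y1` comes from a collapsed `ren(x1, u1)` call:

```julia
using LinearAlgebra, Test

# Hypothetical incremental passivity check: ⟨Δu, Δy⟩ ≥ ν‖Δu‖² for each sample.
ν = 1.0   # matches the constructor argument above
Δu = u1 .- u0
Δy = y1 .- y0
vals = [dot(Δu[:, i], Δy[:, i]) - ν * dot(Δu[:, i], Δu[:, i]) for i in 1:batches]
@test all(vals .>= 0)
```
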
8 changes: 5 additions & 3 deletions test/Wrappers/diff_lbdn.jl

@@ -5,18 +5,20 @@ using Random
 using RobustNeuralNetworks
 using Test
 
+rng = MersenneTwister(42)
+
 """
 Test that backpropagation runs and parameters change
 """
 batches = 10
 nu, ny, γ = 2, 3, 1
 nh = [10,5]
-model_ps = DenseLBDNParams{Float32}(nu, nh, ny, γ; learn_γ=true)
+model_ps = DenseLBDNParams{Float32}(nu, nh, ny, γ; learn_γ=true, rng)
 model = DiffLBDN(model_ps)
 
 # Dummy data
-us = randn(nu, batches)
-ys = randn(ny, batches)
+us = randn(rng, nu, batches)
+ys = randn(rng, ny, batches)
 data = [(us[:,k], ys[:,k]) for k in 1:batches]
 
 # Dummy loss function just for testing

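The loss definition and training step are collapsed in this view. A hedged sketch of a "parameters change after one update" test, assuming the standard Flux API (the loss, optimiser, and learning rate here are illustrative, not the file's actual choices):

```julia
using Flux, Test

# Hypothetical smoke test: one gradient step should change the parameters.
loss(m, u, y) = Flux.mse(m(u), y)

before = deepcopy(Flux.params(model))
opt_state = Flux.setup(Flux.Adam(0.01), model)

u, y = data[1]
grads = Flux.gradient(m -> loss(m, u, y), model)
Flux.update!(opt_state, model, grads[1])

# At least one parameter array should differ after the update.
@test any(any(b .!= a) for (b, a) in zip(before, Flux.params(model)))
```
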
8 changes: 5 additions & 3 deletions test/Wrappers/diff_ren.jl

@@ -5,18 +5,20 @@ using Random
 using RobustNeuralNetworks
 using Test
 
+rng = MersenneTwister(42)
+
 """
 Test that backpropagation runs and parameters change
 """
 batches = 10
 nu, nx, nv, ny = 4, 5, 10, 2
 γ = 10
-ren_ps = LipschitzRENParams{Float64}(nu, nx, nv, ny, γ)
+ren_ps = LipschitzRENParams{Float64}(nu, nx, nv, ny, γ; rng)
 model = DiffREN(ren_ps)
 
 # Dummy data
-us = randn(nu, batches)
-ys = randn(ny, batches)
+us = randn(rng, nu, batches)
+ys = randn(rng, ny, batches)
 data = [(us[:,k], ys[:,k]) for k in 1:batches]
 
 # Dummy loss function just for testing

8 changes: 5 additions & 3 deletions test/Wrappers/wrap_ren.jl

@@ -5,6 +5,8 @@ using Random
 using RobustNeuralNetworks
 using Test
 
+rng = MersenneTwister(42)
+
 """
 Test REN wrapper with General REN params
 """
@@ -15,15 +17,15 @@ Q = Matrix{Float64}(-I(ny))
 R = 0.1^2 * Matrix{Float64}(I(nu))
 S = zeros(Float64, nu, ny)
 
-ren_ps = GeneralRENParams{Float64}(nu, nx, nv, ny, Q, S, R)
+ren_ps = GeneralRENParams{Float64}(nu, nx, nv, ny, Q, S, R; rng)
 ren1 = WrapREN(ren_ps)
 
 x0 = init_states(ren1, batches)
-u0 = randn(nu, batches)
+u0 = randn(rng, nu, batches)
 
 # Update the model after changing a parameter
 old_B2 = deepcopy(ren1.explicit.B2)
-ren1.params.direct.B2 .*= rand(size(ren1.params.direct.B2)...)
+ren1.params.direct.B2 .*= rand(rng, size(ren1.params.direct.B2)...)
 
 x1, y1 = ren1(x0, u0)
 update_explicit!(ren1)

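The assertion is collapsed, but the setup above suggests it verifies that `update_explicit!` propagates the perturbed direct parameter into the explicit model. A one-line sketch under that assumption:

```julia
using Test

# Hypothetical check: explicit parameters should differ after update_explicit!.
@test ren1.explicit.B2 != old_B2
```
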
8 changes: 5 additions & 3 deletions test/Wrappers/zero_dim.jl

@@ -5,17 +5,19 @@ using Random
 using RobustNeuralNetworks
 using Test
 
+rng = MersenneTwister(42)
+
 """
 Test that backpropagation runs when nx = 0 and nv = 0
 """
 batches = 10
 nu, nx, nv, ny = 4, 0, 0, 2
 γ = 10
-model_ps = LipschitzRENParams{Float64}(nu, nx, nv, ny, γ)
+model_ps = LipschitzRENParams{Float64}(nu, nx, nv, ny, γ; rng)
 
 # Dummy data
-us = randn(nu, batches)
-ys = randn(ny, batches)
+us = randn(rng, nu, batches)
+ys = randn(rng, ny, batches)
 data = [(us, ys)]
 
 # Dummy loss function just for testing

