Skip to content

Commit

Permalink
Merge pull request #27 from JuliaReach/schillic/ai2
Browse files Browse the repository at this point in the history
Add partial AI2 algorithm
  • Loading branch information
schillic authored May 25, 2024
2 parents 7d38b14 + 39f1ab9 commit 2d94163
Show file tree
Hide file tree
Showing 17 changed files with 257 additions and 5 deletions.
4 changes: 2 additions & 2 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
Requires = "ae029012-a4dd-5104-9daa-d747884805df"

[compat]
ControllerFormats = "0.2"
LazySets = "2.11.1"
ControllerFormats = "0.2.3"
LazySets = "2.13"
LinearAlgebra = "<0.0.1, 1.6"
ReachabilityBase = "0.2.1"
Reexport = "0.2, 1"
Expand Down
3 changes: 2 additions & 1 deletion docs/make.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@ makedocs(; sitename="NeuralNetworkReachability.jl",
pages=["Home" => "index.md",
"Library" => Any["ForwardAlgorithms" => "lib/ForwardAlgorithms.md",
"BackwardAlgorithms" => "lib/BackwardAlgorithms.md",
"BidirectionalAlgorithms" => "lib/BidirectionalAlgorithms.md"],
"BidirectionalAlgorithms" => "lib/BidirectionalAlgorithms.md",
"Util" => "lib/Util.md"],
"About" => "about.md"])

deploydocs(; repo="github.com/JuliaReach/NeuralNetworkReachability.jl.git",
Expand Down
3 changes: 3 additions & 0 deletions docs/src/lib/ForwardAlgorithms.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,5 +19,8 @@ LazyForward
BoxForward
SplitForward
DeepZ
AI2Box
AI2Zonotope
AI2Polytope
Verisig
```
16 changes: 16 additions & 0 deletions docs/src/lib/Util.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Util

This section of the manual describes the `Util` module, which provides utilities shared across the package.

```@contents
Pages = ["Util.md"]
Depth = 3
```

```@meta
CurrentModule = NeuralNetworkReachability.Util
```

```@docs
ConvSet
```
122 changes: 122 additions & 0 deletions src/ForwardAlgorithms/AI2.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
"""
AI2Box <: AI2
AI2 forward algorithm for ReLU activation functions based on abstract
interpretation with the interval domain from [1].
### Notes
This algorithm is less precise than [`BoxForward`](@ref) because it abstracts
after every step, including the affine map.
[1]: Gehr et al.: *AI²: Safety and robustness certification of neural networks
with abstract interpretation*, SP 2018.
"""
struct AI2Box <: ForwardAlgorithm end

"""
AI2Zonotope <: AI2
AI2 forward algorithm for ReLU activation functions based on abstract
interpretation with the zonotope domain from [1].
### Fields
- `join_algorithm` -- (optional; default: `"join"`) algorithm to compute the
join of two zonotopes
[1]: Gehr et al.: *AI²: Safety and robustness certification of neural networks
with abstract interpretation*, SP 2018.
"""
struct AI2Zonotope{S} <: ForwardAlgorithm
join_algorithm::S
end

# the default join algorithm is "join"
AI2Zonotope() = AI2Zonotope("join")

"""
AI2Polytope <: AI2
AI2 forward algorithm for ReLU activation functions based on abstract
interpretation with the polytope domain from [1].
[1]: Gehr et al.: *AI²: Safety and robustness certification of neural networks
with abstract interpretation*, SP 2018.
"""
struct AI2Polytope <: ForwardAlgorithm end

# meet and join algorithms for different abstract domains

# zonotope domain: overapproximate the lazy intersection/union with a zonotope
# (the `∩`/`∪` operators were garbled in transit and are restored here)
const _meet_zonotope = (X, Y) -> overapproximate(X ∩ Y, Zonotope)
# note: `const` is invalid on a function definition in Julia, so the
# join factory is a plain method definition
_join_zonotope(algo) = (X, Y) -> overapproximate(X ∪ Y, Zonotope; algorithm=algo)
# polytope domain: exact meet (intersection) and convex-hull join
const _meet_polytope = intersection
const _join_polytope = convex_hull

# apply affine map

# box: the affine map is built lazily and then overapproximated with a box,
# which is what makes AI2Box coarser than BoxForward
function forward(H, W::AbstractMatrix, b::AbstractVector, ::AI2Box)
    affine = W * H + b  # lazy affine map of the input set
    return box_approximation(affine)
end

# zonotope and polytope: both domains are closed under affine maps,
# so the map is applied exactly (no abstraction needed)
function forward(S, W::AbstractMatrix, b::AbstractVector, ::Union{AI2Zonotope,AI2Polytope})
    return affine_map(W, S, b)
end

# apply ReLU activation function
# for each dimension 1:n
#   1(a) if nonnegative: nothing to do (ReLU is the identity there)
#   1(b) if negative: project that dimension to zero
#   1(c) if both nonnegative and negative: intersect with half-spaces and
#        project the negative part
#   2: take the domain element(s) corresponding to the previous set(s)
#   3(c): union of the two sets, then take the corresponding domain element

# box: exploits that Box(ReLU(H)) = ReLU(H), i.e., rectification of a
# hyperrectangle is exact in the interval domain
function forward(H::AbstractHyperrectangle, ::ReLU, ::AI2Box)
    return rectify(H)
end

# zonotope: joins the zonotope overapproximations of all pairwise projected
# intersections (the lazy intersections require IntervalConstraintProgramming)
function forward(Z::AbstractZonotope, ::ReLU, algo::AI2Zonotope)
    require(@__MODULE__, :IntervalConstraintProgramming; fun_name="forward",
            explanation="with AI2Zonotope")

    return _forward_AI2_ReLU(Z; meet=_meet_zonotope, join=_join_zonotope(algo.join_algorithm))
end

# polytope: joins (via convex hull) all pairwise projected intersections
function forward(P::AbstractPolytope, ::ReLU, ::AI2Polytope)
    return _forward_AI2_ReLU(P; meet=_meet_polytope, join=_join_polytope)
end

# common ReLU implementation for the zonotope and polytope domains;
# `meet` and `join` are the domain-specific abstract operations
function _forward_AI2_ReLU(X::LazySet{N}; meet, join) where {N}
    n = dim(X)
    d = ones(N, n)  # reused vector for "almost" identity matrices
    for i in 1:n
        if low(X, i) >= 0  # nonnegative case: ReLU is the identity in dimension i
            continue
        elseif high(X, i) <= 0  # negative case: project dimension i to zero
            d[i] = zero(N)
            D = Diagonal(d)
            X = linear_map(D, X)
            d[i] = one(N)  # restore `d` for the next iteration
        else  # mixed case
            # nonnegative part: meet with the half-space x_i >= 0
            H1 = HalfSpace(SingleEntryVector(i, n, -one(N)), zero(N))
            X1 = meet(X, H1)

            # negative part: meet with the half-space x_i <= 0, then project
            # dimension i to zero
            H2 = HalfSpace(SingleEntryVector(i, n, one(N)), zero(N))
            X2 = meet(X, H2)
            d[i] = zero(N)
            D = Diagonal(d)
            X2′ = linear_map(D, X2)
            d[i] = one(N)  # restore `d` for the next iteration

            # join of the two parts within the abstract domain
            X = join(X1, X2′)
        end
    end
    return X
end
5 changes: 5 additions & 0 deletions src/ForwardAlgorithms/DefaultForward.jl
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,11 @@ function forward(X::AbstractSingleton, net::FeedforwardNetwork, ::ForwardAlgorit
return forward(X, net, DefaultForward())
end

# flattening a ConvSet just unwraps the set: the wrapped set is already a
# flat (vector-based) representation, so no computation is needed
function forward(cs::ConvSet, ::FlattenLayerOp, ::ForwardAlgorithm=DefaultForward())
    return cs.set
end

# propagate singleton through network and store all intermediate results
function _forward_store(X::AbstractSingleton, net::FeedforwardNetwork,
algo::DefaultForward=DefaultForward())
Expand Down
5 changes: 5 additions & 0 deletions src/ForwardAlgorithms/ForwardAlgorithms.jl
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
module ForwardAlgorithms

using ..Util
using LinearAlgebra: Diagonal
using ControllerFormats
using LazySets
using LazySets: remove_zero_columns
using ReachabilityBase.Arrays: SingleEntryVector
using ReachabilityBase.Comparison: _isapprox
using ReachabilityBase.Require: require
using Requires
Expand All @@ -14,6 +17,7 @@ export forward,
BoxForward,
SplitForward,
DeepZ,
AI2Box, AI2Zonotope, AI2Polytope,
Verisig

include("ForwardAlgorithm.jl")
Expand All @@ -24,6 +28,7 @@ include("LazyForward.jl")
include("BoxForward.jl")
include("SplitForward.jl")
include("DeepZ.jl")
include("AI2.jl")
include("Verisig.jl")

include("init.jl")
Expand Down
3 changes: 3 additions & 0 deletions src/ForwardAlgorithms/init.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
# optional dependencies
function __init__()
@require IntervalConstraintProgramming = "138f1668-1576-5ad7-91b9-7425abbf3153" begin
# nothing
end
@require ReachabilityAnalysis = "1e97bd63-91d1-579d-8e8d-501d2b57c93f" begin
include("init_ReachabilityAnalysis.jl")
end
Expand Down
3 changes: 3 additions & 0 deletions src/NeuralNetworkReachability.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@ module NeuralNetworkReachability

using Reexport

include("Util/Util.jl")
@reexport using .Util

include("ForwardAlgorithms/ForwardAlgorithms.jl")
@reexport using .ForwardAlgorithms

Expand Down
22 changes: 22 additions & 0 deletions src/Util/ConvSet.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
"""
ConvSet{T<:LazySet{N}}
Wrapper of a set to represent a three-dimensional structure.
### Fields
- `set` -- set of dimension `dims[1] * dims[2] * dims[3]`
- `dims` -- 3-tuple with the dimensions
"""
struct ConvSet{T<:LazySet}
set::T
dims::NTuple{3,Int}

function ConvSet(set::T, dims::NTuple{3,Int}; validate=Val(true)) where {T}
if validate isa Val{true} && (dim(set) != dims[1] * dims[2] * dims[3] ||
dims[1] <= 0 || dims[2] <= 0 || dims[3] <= 0)
throw(ArgumentError("invalid dimensions $(dim(set)) and $dims"))
end
return new{T}(set, dims)
end
end
9 changes: 9 additions & 0 deletions src/Util/Util.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
"""
Module containing utility types shared across the package.
"""
module Util

using LazySets: LazySet, dim

export ConvSet

include("ConvSet.jl")

end  # module
39 changes: 39 additions & 0 deletions test/ForwardAlgorithms/forward.jl
Original file line number Diff line number Diff line change
Expand Up @@ -255,6 +255,38 @@ end
end
end

@testset "AI² ReLU example" begin
N = example_network_AI2()
W = N.layers[1].weights
b = N.layers[1].bias

H = Hyperrectangle(; low=[0.0, 1.0], high=[2.0, 3.0])
Z = Zonotope([1.0, 2.0], [0.5 0.5 0.0; 0.5 0.0 0.5])
P = HPolytope([HalfSpace([-2.0, 1.0], 1.0), HalfSpace([1.0, 1.0], 4.0),
HalfSpace([0.0, -1.0], -1.0), HalfSpace([1.0, -1.0], 0.0)])

# affine map
@test isequivalent(forward(H, W, b, AI2Box()),
Hyperrectangle([0.0, 2.0], [3.0, 1.0]))
@test isequivalent(forward(Z, W, b, AI2Zonotope()),
Zonotope([0.0, 2.0], [0.5 1.0 -0.5; 0.5 0.0 0.5]))
@test isequivalent(forward(P, W, b, AI2Polytope()),
VPolygon([[2.0, 2.0], [1.0, 1.0], [-1.0, 1.0], [-1.0, 3.0]]))

# ReLU activation
Z2 = Zonotope([2.0, 2.0], [0.5 0.5 0.0; 0.5 0.0 0.5]) # all nonnegative
@test forward(Z2, ReLU(), AI2Zonotope()) == Z2
Z2 = Zonotope([-2.0, -2.0], [0.5 0.5 0.0; 0.5 0.0 0.5]) # all nonpositive
@test isequivalent(forward(Z2, ReLU(), AI2Zonotope()), Singleton(zeros(2)))

# network with ReLU activation
@test isequivalent(forward(H, N, AI2Box()), Hyperrectangle(; low=[0.0, 1.0], high=[3.0, 3.0]))
# zonotope implementation is less precise than in the paper
@test (Zonotope([0.5, 2.0], [0.5 0.5 -0.5; 0.0 0.5 0.5]), forward(Z, N, AI2Zonotope()))
@test isequivalent(forward(P, N, AI2Polytope()),
VPolygon([[0.0, 3.0], [0.0, 1.0], [1.0, 1.0], [2.0, 2.0]]))
end

@testset "Forward leaky-ReLU network" begin
N = example_network_232(LeakyReLU(0.1))
X = BallInf([1.0, 1.0], 0.1)
Expand Down Expand Up @@ -334,3 +366,10 @@ end
# not supported yet
@test_broken forward(X, N, Verisig())
end

@testset "Forward flattening layer" begin
S = Singleton(1:8)
dims = (2, 2, 2)
cs = ConvSet(S, dims)
@test forward(cs, FlattenLayerOp()) == S
end
4 changes: 3 additions & 1 deletion test/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
CDDLib = "3391f64e-dcde-5f30-b752-e11513730f60"
ControllerFormats = "02ac4b2c-022a-44aa-84a5-ea45a5754bcc"
IntervalConstraintProgramming = "138f1668-1576-5ad7-91b9-7425abbf3153"
LazySets = "b4f0291d-fe17-52bc-9479-3d1a343d9043"
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
Polyhedra = "67491407-f73d-577b-9b50-8179a7c68029"
Expand All @@ -12,7 +13,8 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Aqua = "0.8"
CDDLib = "0.6 - 0.9"
ControllerFormats = "0.2"
LazySets = "2.12.1"
IntervalConstraintProgramming = "0.9 - 0.13"
LazySets = "2.13"
Optim = "1"
Polyhedra = "0.6 - 0.7"
ReachabilityAnalysis = "0.22 - 0.25"
10 changes: 10 additions & 0 deletions test/Util/Util.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
using NeuralNetworkReachability.Util: ConvSet

@testset "ConvSet" begin
    B = BallInf(zeros(12), 1.0)
    # valid factorizations of the set dimension 12
    ConvSet(B, (1, 2, 6))
    ConvSet(B, (2, 2, 3))
    # invalid: a zero entry or a product that does not match the dimension
    for bad_dims in ((0, 1, 12), (2, 2, 2), (0, 0, 0))
        @test_throws ArgumentError ConvSet(B, bad_dims)
    end
end
4 changes: 4 additions & 0 deletions test/example_networks.jl
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,7 @@ function example_network_1221(act::ActivationFunction=ReLU())
DenseLayerOp([2.0 2.0; 2.0 2.0], [2.5, 2.5], act),
DenseLayerOp([3.0 3.0;], [3.5], Id())])
end

# single-layer 2D ReLU network used by the AI² tests
function example_network_AI2()
    W = [2.0 -1.0; 0.0 1.0]
    b = [0.0, 0.0]
    return FeedforwardNetwork([DenseLayerOp(W, b, ReLU())])
end
5 changes: 5 additions & 0 deletions test/optional_dependencies_not_loaded.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,9 @@ for dummy in [1]
@test_throws AssertionError Verisig()
@test_throws AssertionError forward(X, N, Verisig(nothing))
end

# AI2Zonotope
if !isdefined(@__MODULE__, :IntervalConstraintProgramming)
@test_throws AssertionError forward(X, N, AI2Zonotope())
end
end
5 changes: 4 additions & 1 deletion test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,11 @@ include("example_networks.jl")
end

# load optional dependencies
import ReachabilityAnalysis, Polyhedra, CDDLib, Optim
import IntervalConstraintProgramming, ReachabilityAnalysis, Polyhedra, CDDLib, Optim

@testset "Util" begin
include("Util/Util.jl")
end
@testset "ForwardAlgorithms" begin
include("ForwardAlgorithms/forward.jl")
end
Expand Down

0 comments on commit 2d94163

Please sign in to comment.