Fix reshape #20

Draft · wants to merge 5 commits into main
98 changes: 0 additions & 98 deletions docs/src/hamiltonian_neural_network.md

This file was deleted.

2 changes: 2 additions & 0 deletions src/SymbolicNeuralNetworks.jl
@@ -20,6 +20,8 @@ module SymbolicNeuralNetworks
 export symbolize
 include("utils/symbolize.jl")

+include("utils/create_array.jl")
+
 export AbstractSymbolicNeuralNetwork
 export SymbolicNeuralNetwork, SymbolicModel
 export HamiltonianSymbolicNeuralNetwork, HNNLoss
2 changes: 1 addition & 1 deletion src/pullback.jl
@@ -93,7 +93,7 @@ function SymbolicPullback(nn::SymbolicNeuralNetwork, loss::NetworkLoss)
     @variables soutput[1:output_dimension(nn.model)]
     symbolic_loss = loss(nn.model, nn.params, nn.input, soutput)
     symbolic_pullbacks = symbolic_pullback(symbolic_loss, nn)
-    pbs_executable = build_nn_function(symbolic_pullbacks, nn.params, nn.input, soutput)
+    pbs_executable = build_nn_function(symbolic_pullbacks, nn.params, nn.input, soutput; reduce = +)
     function pbs(input, output, params)
         pullback(::Union{Real, AbstractArray{<:Real}}) = _get_contents(_get_params(pbs_executable(input, output, params)))
         pullback
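The only change here is `reduce = +`: a pullback accumulates gradient contributions over the batch, so the per-column results of the generated function must be summed rather than concatenated. A minimal sketch of the two reduction modes, with a toy `per_column` stand-in for the generated function (not part of the package):

```julia
# Toy stand-in for the generated per-column function.
per_column(x, k) = 2 .* x[:, k]

x = [1.0 2.0 3.0; 4.0 5.0 6.0]   # batch of three samples

# reduce = hcat: keep one result per sample (forward evaluation)
stacked = mapreduce(k -> per_column(x, k), hcat, axes(x, 2))
@assert size(stacked) == (2, 3)

# reduce = +: accumulate over the batch (loss gradients)
summed = mapreduce(k -> per_column(x, k), +, axes(x, 2))
@assert summed ≈ [12.0, 30.0]
```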
15 changes: 11 additions & 4 deletions src/utils/build_function.jl
@@ -22,12 +22,19 @@ function build_nn_function(eq::EqT, nn::AbstractSymbolicNeuralNetwork)
     build_nn_function(eq, nn.params, nn.input)
 end

-function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr)
+function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr; reduce = hcat)
     gen_fun = _build_nn_function(eq, sparams, sinput)
-    gen_fun_returned(x, ps) = mapreduce(k -> gen_fun(x, ps, k), hcat, axes(x, 2))
-    gen_fun_returned(x::Union{AbstractVector, Symbolics.Arr}, ps) = gen_fun_returned(reshape(x, length(x), 1), ps)
+    gen_fun_returned(x, ps) = mapreduce(k -> gen_fun(x, ps, k), reduce, axes(x, 2))
+    function gen_fun_returned(x::Union{AbstractVector, Symbolics.Arr}, ps)
+        output_not_reshaped = gen_fun_returned(reshape(x, length(x), 1), ps)
+        # for vectors we do not reshape, as the output may be a matrix
+        output_not_reshaped
+    end
     # check this! (definitely not correct in all cases!)
-    gen_fun_returned(x::AbstractArray{<:Number, 3}, ps) = reshape(gen_fun_returned(reshape(x, size(x, 1), size(x, 2) * size(x, 3)), ps), size(x, 1), size(x, 2), size(x, 3))
+    function gen_fun_returned(x::AbstractArray{<:Number, 3}, ps)
+        output_not_reshaped = gen_fun_returned(reshape(x, size(x, 1), size(x, 2) * size(x, 3)), ps)
+        reshape(output_not_reshaped, size(output_not_reshaped, 1), size(x, 2), size(x, 3))
+    end
     gen_fun_returned
 end
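The substantive fix in the three-tensor method: after flattening the two trailing dimensions and mapping over the columns, the result is reshaped using the output's own leading dimension rather than the input's, since a layer generally changes the feature dimension (a `Dense(2, 3)` turns 2 rows into 3). A self-contained sketch of that round trip, with a toy column-wise map standing in for the generated function:

```julia
# Toy feature map: 2 input features -> 3 output features, applied column-wise.
apply_columns(x::AbstractMatrix) = ones(3, 2) * x

x = rand(2, 5, 7)                                        # features × time × batch
flat = reshape(x, size(x, 1), size(x, 2) * size(x, 3))   # 2 × 35
out_flat = apply_columns(flat)                           # 3 × 35
# reshape with the OUTPUT's first dimension (3), not the input's (2):
out = reshape(out_flat, size(out_flat, 1), size(x, 2), size(x, 3))
@assert size(out) == (3, 5, 7)
```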
25 changes: 21 additions & 4 deletions src/utils/build_function2.jl
@@ -16,14 +16,31 @@
     build_nn_function(eqs, nn.params, nn.input, soutput)
 end

-function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr, soutput::Symbolics.Arr)
+function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr, soutput::Symbolics.Arr; reduce = hcat)
     gen_fun = _build_nn_function(eq, sparams, sinput, soutput)
-    gen_fun_returned(input, output, ps) = mapreduce(k -> gen_fun(input, output, ps, k), +, axes(input, 2))
-    gen_fun_returned(input::AT, output::AT, ps) where {AT <: Union{AbstractVector, Symbolics.Arr}} = gen_fun_returned(reshape(input, length(input), 1), reshape(output, length(output), 1), ps)
-    gen_fun_returned(input::AT, output::AT, ps) where {T, AT <: AbstractArray{T, 3}} = gen_fun_returned(reshape(input, size(input, 1), size(input, 2) * size(input, 3)), reshape(output, size(output, 1), size(output, 2) * size(output, 3)), ps)
+    gen_fun_returned(input, output, ps) = mapreduce(k -> gen_fun(input, output, ps, k), reduce, axes(input, 2))
+    function gen_fun_returned(x::AT, y::AT, ps) where {AT <: Union{AbstractVector, Symbolics.Arr}}
+        output_not_reshaped = gen_fun_returned(reshape(x, length(x), 1), reshape(y, length(y), 1), ps)
+        # for vectors we do not reshape, as the output may be a matrix
+        output_not_reshaped
+    end
+    # check this! (definitely not correct in all cases!)
+    function gen_fun_returned(x::AT, y::AT, ps) where {AT <: AbstractArray{<:Number, 3}}
+        output_not_reshaped = gen_fun_returned(reshape(x, size(x, 1), size(x, 2) * size(x, 3)), reshape(y, size(y, 1), size(y, 2) * size(y, 3)), ps)
+        # if arrays are added together then don't reshape!
+        optional_reshape(output_not_reshaped, reduce, x)
+    end
     gen_fun_returned
 end

+function optional_reshape(output_not_reshaped::AbstractVecOrMat, ::typeof(+), ::AbstractArray{<:Number, 3})
+    output_not_reshaped
+end
+
+function optional_reshape(output_not_reshaped::AbstractVecOrMat, ::typeof(hcat), input::AbstractArray{<:Number, 3})
+    reshape(output_not_reshaped, size(output_not_reshaped, 1), size(input, 2), size(input, 3))
+end

 """
     _build_nn_function(eq, params, sinput, soutput)

Codecov (codecov/patch) check warning on src/utils/build_function2.jl#L36-L37: Added lines #L36 - L37 were not covered by tests.
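`optional_reshape` encodes why the two reduction modes need different post-processing: with `reduce = +` the batch axis has already been summed away, so there is nothing to restore, while with `reduce = hcat` the concatenated columns still carry the batch and must be folded back into the input's trailing dimensions. A standalone sketch (mirroring, not importing, the PR code):

```julia
# Standalone mirror of the optional_reshape dispatch above.
optional_reshape(out::AbstractVecOrMat, ::typeof(+), ::AbstractArray{<:Number, 3}) = out
optional_reshape(out::AbstractVecOrMat, ::typeof(hcat), x::AbstractArray{<:Number, 3}) =
    reshape(out, size(out, 1), size(x, 2), size(x, 3))

x = rand(2, 5, 7)       # three-dimensional input
summed  = rand(3)       # what reduce = + leaves behind: no batch axis
stacked = rand(3, 35)   # what reduce = hcat leaves behind: 5 * 7 columns

@assert optional_reshape(summed, +, x) === summed              # untouched
@assert size(optional_reshape(stacked, hcat, x)) == (3, 5, 7)  # batch restored
```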
16 changes: 8 additions & 8 deletions src/utils/build_function_arrays.jl
@@ -29,8 +29,8 @@ funcs_evaluated = funcs(input, ps)
 (true, true, true)
 ```
 """
-function build_nn_function(eqs::AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
-    ps_semi = [function_valued_parameters(eq, sparams, sinput...) for eq in eqs]
+function build_nn_function(eqs::AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...; reduce = hcat)
+    ps_semi = [function_valued_parameters(eq, sparams, sinput...; reduce = reduce) for eq in eqs]

     _pbs_executable(ps_functions, params, input...) = apply_element_wise(ps_functions, params, input...)
     __pbs_executable(input, params) = _pbs_executable(ps_semi, params, input)
@@ -72,8 +72,8 @@ funcs_evaluated = funcs(input, ps)

 Internally this is using [`function_valued_parameters`](@ref) and [`apply_element_wise`](@ref).
 """
-function build_nn_function(eqs::Union{NamedTuple, NeuralNetworkParameters}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
-    ps = function_valued_parameters(eqs, sparams, sinput...)
+function build_nn_function(eqs::Union{NamedTuple, NeuralNetworkParameters}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...; reduce = hcat)
+    ps = function_valued_parameters(eqs, sparams, sinput...; reduce = reduce)
     _pbs_executable(ps::Union{NamedTuple, NeuralNetworkParameters}, params::NeuralNetworkParameters, input::AbstractArray...) = apply_element_wise(ps, params, input...)
     __pbs_executable(input::AbstractArray, params::NeuralNetworkParameters) = _pbs_executable(ps, params, input)
     # return this one if sinput & soutput are supplied
@@ -110,13 +110,13 @@ b = c(input, ps).^2
 (true, true)
 ```
 """
-function function_valued_parameters(eqs::NeuralNetworkParameters, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
-    vals = Tuple(build_nn_function(eqs[key], sparams, sinput...) for key in keys(eqs))
+function function_valued_parameters(eqs::NeuralNetworkParameters, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...; reduce = hcat)
+    vals = Tuple(build_nn_function(eqs[key], sparams, sinput...; reduce = reduce) for key in keys(eqs))
     NeuralNetworkParameters{keys(eqs)}(vals)
 end

-function function_valued_parameters(eqs::NamedTuple, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
-    vals = Tuple(build_nn_function(eqs[key], sparams, sinput...) for key in keys(eqs))
+function function_valued_parameters(eqs::NamedTuple, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...; reduce = hcat)
+    vals = Tuple(build_nn_function(eqs[key], sparams, sinput...; reduce = reduce) for key in keys(eqs))
     NamedTuple{keys(eqs)}(vals)
 end
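These four methods only thread the new `reduce` keyword through to the scalar `build_nn_function`; the `hcat` default keeps the previous behaviour, and `SymbolicPullback` is the caller that passes `+`. A hedged usage sketch, following the docstring pattern `funcs_evaluated = funcs(input, ps)` (network and shapes are made up for illustration):

```julia
using SymbolicNeuralNetworks, AbstractNeuralNetworks

c = Chain(Dense(2, 3))
nn = SymbolicNeuralNetwork(c)
eqs = (output = nn.model(nn.input, nn.params),)

funcs        = build_nn_function(eqs, nn.params, nn.input)              # reduce = hcat
funcs_summed = build_nn_function(eqs, nn.params, nn.input; reduce = +)  # accumulate

ps = NeuralNetwork(c).params
x  = rand(2, 4)
funcs(x, ps).output         # one column per sample
funcs_summed(x, ps).output  # columns summed over the batch
```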
3 changes: 3 additions & 0 deletions src/utils/create_array.jl
@@ -0,0 +1,3 @@
+function Symbolics.SymbolicUtils.Code.create_array(::Type{<:Base.ReshapedArray{T, N, P}}, S, nd::Val, d::Val, elems...) where {T, N, P}
+    Symbolics.SymbolicUtils.Code.create_array(P, S, nd, d, elems...)
+end
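Context for this new method, to the extent the tests below suggest it: reshaping a view produces a `Base.ReshapedArray`, a wrapper type for which `SymbolicUtils.Code.create_array` previously had no method; the overload simply unwraps to the parent array type `P`. A plain-Julia illustration of the wrapper type involved:

```julia
# Reshaping a view yields a Base.ReshapedArray wrapping the view's type.
x  = rand(2, 5)
xv = @view x[:, 1:2]        # SubArray
x2 = reshape(xv, 2, 1, 2)   # Base.ReshapedArray{Float64, 3, <:SubArray, ...}
@assert x2 isa Base.ReshapedArray
# The new create_array method forwards such a type to its parent P,
# which Symbolics' generated code already knows how to allocate for.
```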
40 changes: 40 additions & 0 deletions test/reshape_test.jl
@@ -0,0 +1,40 @@
+using SymbolicNeuralNetworks
+using AbstractNeuralNetworks
+using Symbolics
+using Test
+
+function set_up_network()
+    c = Chain(Dense(2, 3))
+    nn = SymbolicNeuralNetwork(c)
+    soutput = nn.model(nn.input, nn.params)
+    nn_cpu = NeuralNetwork(c)
+    nn, soutput, nn_cpu
+end
+
+function test_for_input()
+    nn, soutput, nn_cpu = set_up_network()
+    input = rand(2, 5)
+    input2 = reshape((@view input[:, 1:2]), 2, 1, 2)
+    built_function = build_nn_function(soutput, nn.params, nn.input)
+    outputs = built_function(input2, nn_cpu.params)
+    for i in 1:2
+        @test nn.model(input[:, i], nn_cpu.params) ≈ outputs[:, 1, i]
+    end
+end
+
+function test_for_input_and_output()
+    nn, soutput2, nn_cpu = set_up_network()
+    input = rand(2, 5)
+    output = rand(3, 5)
+    input2 = reshape((@view input[:, 1:2]), 2, 1, 2)
+    output2 = reshape((@view output[:, 1:2]), 3, 1, 2)
+    @variables soutput[1:3]
+    built_function = build_nn_function((soutput - soutput2).^2, nn.params, nn.input, soutput)
+    outputs = built_function(input2, output2, nn_cpu.params)
+    for i in 1:2
+        @test (nn.model(input[:, i], nn_cpu.params) - output[:, i]).^2 ≈ outputs[:, 1, i]
+    end
+end
+
+test_for_input()
+test_for_input_and_output()
9 changes: 5 additions & 4 deletions test/runtests.jl
@@ -3,7 +3,8 @@ using SafeTestsets
 using Test

 @safetestset "Docstring tests. " begin include("doctest.jl") end
-@safetestset "Symbolic gradient " begin include("symbolic_gradient.jl") end
-@safetestset "Symbolic Neural network " begin include("neural_network_derivative.jl") end
-@safetestset "Symbolic Params " begin include("test_params.jl") end
-# @safetestset "HNN Loss " begin include("test_hnn_loss_pullback.jl") end
+@safetestset "Symbolic gradient " begin include("symbolic_gradient.jl") end
+@safetestset "Symbolic Neural network " begin include("neural_network_derivative.jl") end
+@safetestset "Symbolic Params " begin include("test_params.jl") end
+# @safetestset "HNN Loss " begin include("test_hnn_loss_pullback.jl") end
+@safetestset "Check if reshape works in the correct way with the generated functions. " begin include("reshape_test.jl") end