[GNNLux] Adding NNConv Layer #478

Closed · rbSparky wants to merge 47 commits into CarloLucibello:master from rbSparky:nn-lux

Changes from 9 commits

Commits (47)
786b200  WIP (rbSparky, Aug 2, 2024)
01767ee  WIP (rbSparky, Aug 2, 2024)
b8c4db6  Update conv.jl (rbSparky, Aug 3, 2024)
b6c1a27  fix (rbSparky, Aug 4, 2024)
fb0bb1d  Update conv.jl (rbSparky, Aug 4, 2024)
32ee61d  fix (rbSparky, Aug 4, 2024)
2585cb6  Merge branch 'nn-lux' of https://github.com/rbSparky/GraphNeuralNetwo… (rbSparky, Aug 4, 2024)
cd28e97  Update conv.jl (rbSparky, Aug 4, 2024)
7f1a07a  Update conv.jl (rbSparky, Aug 4, 2024)
70674a2  added tests (rbSparky, Aug 19, 2024)
17627f1  Merge branch 'nn-lux' of https://github.com/rbSparky/GraphNeuralNetwo… (rbSparky, Aug 19, 2024)
8f081cd  Delete GNNLux/test/layers/temp.jl (rbSparky, Aug 19, 2024)
af0b78b  Merge branch 'CarloLucibello:master' into nn-lux (rbSparky, Aug 19, 2024)
890fcda  add to lux (rbSparky, Aug 19, 2024)
fc2db99  fix test (rbSparky, Aug 19, 2024)
90fc120  fixing (rbSparky, Aug 19, 2024)
0dae0bc  Delete data.txt (rbSparky, Aug 19, 2024)
01ec78b  Delete redundant file (rbSparky, Aug 19, 2024)
ff012bb  trying test fix (rbSparky, Aug 19, 2024)
cf7d30a  trying test fix (rbSparky, Aug 19, 2024)
1c60d1c  Update conv_tests.jl (rbSparky, Aug 19, 2024)
39b9c74  Update basic_tests.jl (rbSparky, Aug 19, 2024)
894bdb3  Update conv_tests.jl (rbSparky, Aug 19, 2024)
6e610c1  Update conv_tests.jl (rbSparky, Aug 19, 2024)
caf355c  Update conv_tests.jl: edata issues (rbSparky, Aug 19, 2024)
3e32261  Update conv_tests.jl (rbSparky, Aug 19, 2024)
24da4c4  Update conv_tests.jl: edata (rbSparky, Aug 19, 2024)
f2ff073  Update conv_tests.jl (rbSparky, Aug 19, 2024)
93affd2  change lux testing (rbSparky, Aug 19, 2024)
3547d9f  Merge branch 'nn-lux' of https://github.com/rbSparky/GraphNeuralNetwo… (rbSparky, Aug 19, 2024)
f0481b4  Update conv_tests.jl: Trying to fix tests (rbSparky, Aug 22, 2024)
b3e2649  Update conv.jl: trying to fix (rbSparky, Aug 23, 2024)
23b89c2  Update conv.jl: reverted (rbSparky, Aug 23, 2024)
d136de0  Merge branch 'master' into nn-lux (rbSparky, Aug 25, 2024)
4b32e2f  fixing (rbSparky, Aug 25, 2024)
6227cd3  Update shared_testsetup.jl: dont make other tests fail (rbSparky, Aug 25, 2024)
b1d185f  Update shared_testsetup.jl: fixing other tests (rbSparky, Aug 25, 2024)
ef68f79  gitignore (rbSparky, Aug 25, 2024)
4f0d60f  ignore (rbSparky, Aug 25, 2024)
e7661f2  remove useless params (rbSparky, Aug 25, 2024)
67bc8fd  Update GNNLux.jl: ordering (rbSparky, Aug 30, 2024)
b94b1f6  Update Project.toml: fixed (rbSparky, Sep 3, 2024)
91fed90  Update conv_tests.jl: checking test (rbSparky, Sep 4, 2024)
a587553  Update conv_tests.jl (rbSparky, Sep 4, 2024)
232a1b4  Update conv_tests.jl (rbSparky, Sep 4, 2024)
e2de74c  checking tests (rbSparky, Sep 7, 2024)
faa4df3  Update conv_tests.jl: typo in test (rbSparky, Sep 12, 2024)
50 changes: 50 additions & 0 deletions GNNLux/src/layers/conv.jl
@@ -628,3 +628,53 @@ function Base.show(io::IO, l::GINConv)
print(io, ", $(l.ϵ)")
print(io, ")")
end

@concrete struct NNConv <: GNNContainerLayer{(:nn,)}
    nn <: AbstractExplicitLayer
    aggr
    in_dims::Int
    out_dims::Int
    use_bias::Bool
    add_self_loops::Bool
    use_edge_weight::Bool
    init_weight
    init_bias
    σ
end
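
# Editorial aside, not part of this diff: the GNNContainerLayer{(:nn,)} supertype
# marks `nn` as a wrapped sub-layer, so its parameters and state are expected to
# nest under ps.nn / st.nn, alongside this layer's own ps.weight and ps.bias.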


function NNConv(ch::Pair{Int, Int}, nn, σ = identity;
                aggr = +,
                init_bias = zeros32,
                use_bias::Bool = true,
                init_weight = glorot_uniform,
                add_self_loops::Bool = true,
                use_edge_weight::Bool = false,
                allow_fast_activation::Bool = true)
    in_dims, out_dims = ch
    σ = allow_fast_activation ? NNlib.fast_act(σ) : σ
    return NNConv(nn, aggr, in_dims, out_dims, use_bias, add_self_loops,
                  use_edge_weight, init_weight, init_bias, σ)
end
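
# Editorial aside, not part of this diff: the struct stores init_weight/init_bias,
# but the hunk shows no parameter initialization. A minimal sketch of what it could
# look like, assuming ps nests the wrapped network under :nn next to a dense weight
# and optional bias (the layout the forward pass below reads):
function LuxCore.initialparameters(rng::AbstractRNG, l::NNConv)
    weight = l.init_weight(rng, l.out_dims, l.in_dims)
    ps = (; nn = LuxCore.initialparameters(rng, l.nn), weight)
    if l.use_bias
        ps = (; ps..., bias = l.init_bias(rng, l.out_dims))
    end
    return ps
end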

function (l::NNConv)(g, x, edge_weight, ps, st)
    # the wrapped network's parameters and state nest under :nn
    nn = StatefulLuxLayer{true}(l.nn, ps.nn, st.nn)

    # bundle the fields GNNlib.nn_conv reads into a named tuple
    m = (; nn, l.aggr, ps.weight, bias = _getbias(ps),
         l.add_self_loops, l.use_edge_weight, l.σ)
    y = GNNlib.nn_conv(m, g, x, edge_weight)
    stnew = _getstate(nn)
    return y, stnew
end
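
# Editorial aside, not part of this diff: GNNlib.nn_conv consumes the fields
# bundled in `m` above. Per edge (j -> i) it is expected to compute, roughly,
#     W_e  = reshape(nn(e_ij), out_dims, in_dims)  # edge network -> weight matrix
#     msg  = W_e * x_j                             # edge-conditioned message
#     x_i' = σ.(weight * x_i .+ aggr(msg) .+ bias)
# so the wrapped `nn` must emit in_dims * out_dims values per edge.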

LuxCore.outputsize(d::NNConv) = (d.out_dims,)

function Base.show(io::IO, l::NNConv)
    print(io, "NNConv($(l.nn)")
    print(io, ", $(l.in_dims) => $(l.out_dims)")
    l.σ == identity || print(io, ", ", l.σ)
    l.use_bias || print(io, ", use_bias=false")
    l.add_self_loops || print(io, ", add_self_loops=false")
    l.use_edge_weight && print(io, ", use_edge_weight=true")
    print(io, ")")
end
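
For context, a usage sketch in the style of the GNNLux tests; the graph size, feature
dimensions, and the `edge_nn` network below are illustrative, not taken from this PR:

using GNNLux, GNNGraphs, Lux, LuxCore, Random

rng = Random.default_rng()
g = rand_graph(rng, 10, 30)                # 10 nodes, 30 edges
x = randn(rng, Float32, 3, g.num_nodes)    # node features, in_dims = 3
e = randn(rng, Float32, 4, g.num_edges)    # edge features, edim = 4

# The edge network maps each edge-feature vector to in_dims * out_dims values,
# which are reshaped into a per-edge (out_dims × in_dims) weight matrix.
edge_nn = Dense(4 => 3 * 5)
l = NNConv(3 => 5, edge_nn, relu; aggr = +)

ps, st = LuxCore.setup(rng, l)
y, st = l(g, x, e, ps, st)                 # size(y) == (5, g.num_nodes)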