3 files changed, +7 -7 lines changed

File 1 of 3:

@@ -13,12 +13,12 @@ using Random
 
 import Functors
 
-# Utilities
-include("utilities.jl")
+# Model utilities
 include("core.jl")
 
 # Custom Layers
 include("layers/Layers.jl")
+include("layers/utilities.jl") # layer utilities
 using .Layers
 
 # CNN models
File 2 of 3:

@@ -12,7 +12,7 @@ using Random
 
 import Flux.testmode!
 
-include("../utilities.jl")
+include("utilities.jl")
 
 include("attention.jl")
 export MultiHeadSelfAttention
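
A note on the path change above: Julia's include resolves paths relative to the file that contains the include call, which is why moving utilities.jl into the layers/ directory (file 1) lets this file drop the "../" prefix. A minimal sketch of the before/after (comments are mine, not part of the source):

    # Before the move: utilities.jl lived one directory up, so this
    # module had to reach outside its own directory.
    include("../utilities.jl")

    # After the move: utilities.jl sits next to the including file,
    # so a bare relative path resolves correctly.
    include("utilities.jl")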
File 3 of 3:

@@ -14,15 +14,15 @@
 
 Convenience function for applying an activation function to the output after
 summing up the input arrays. Useful as the `connection` argument for the block
-function in [`Metalhead.resnet`](@ref).
+function in `Metalhead.resnet`.
 """
 addact(activation = relu, xs...) = activation(sum(xs))
 
 """
     actadd(activation = relu, xs...)
 
-Convenience function for adding input arrays after applying an activation
-function to them. Useful as the `connection` argument for the block function in
-[`Metalhead.resnet`](@ref).
+Convenience function for summing up the input arrays after applying an
+activation function to them. Useful as the `connection` argument for the block
+function in `Metalhead.resnet`.
 """
 actadd(activation = relu, xs...) = sum(activation.(x) for x in xs)
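
Since the two docstrings describe mirror-image operations, a quick sketch of the difference may help. Scalar inputs are used for brevity (the functions are written for arrays, as the docstrings say); only relu from Flux is assumed:

    using Flux  # provides relu

    # Definitions as in the diff above:
    addact(activation = relu, xs...) = activation(sum(xs))
    actadd(activation = relu, xs...) = sum(activation.(x) for x in xs)

    # addact sums first, then activates:
    addact(relu, 1.0, -2.0)  # relu(1.0 - 2.0) == relu(-1.0) == 0.0

    # actadd activates each input first, then sums:
    actadd(relu, 1.0, -2.0)  # relu(1.0) + relu(-2.0) == 1.0 + 0.0 == 1.0

Either can be passed as the `connection` argument mentioned in the docstrings, depending on whether the residual sum should happen before or after the activation.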