Commit ed5a3b8

Use new public feature (#2342)
1 parent 92f8982 commit ed5a3b8

2 files changed: +53 −0 lines changed


Project.toml

Lines changed: 2 additions & 0 deletions
@@ -5,6 +5,7 @@ version = "0.14.6"
 [deps]
 Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
+Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
 Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54"

@@ -38,6 +39,7 @@ AMDGPU = "0.6, 0.7"
 Adapt = "3.0"
 CUDA = "4, 5"
 ChainRulesCore = "1.12"
+Compat = "4.10.0"
 Functors = "0.4"
 MLUtils = "0.4"
 MacroTools = "0.5"
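
Note: Compat 4.10 is the release that provides the @compat public macro used in src/Flux.jl below. On Julia 1.11 and later it lowers to the new `public` keyword; on earlier versions it is a no-op, so the package still loads. A minimal sketch of the pattern outside Flux (the module and function names here are made up for illustration):

    module Demo

    using Compat: @compat          # requires Compat >= 4.10

    double(x) = 2x                 # defined but deliberately not exported
    halve(x) = x / 2

    @compat(public, (double, halve))   # `public double, halve` on Julia >= 1.11, no-op before

    end # module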

src/Flux.jl

Lines changed: 51 additions & 0 deletions
@@ -1,6 +1,7 @@
 module Flux
 
 using Base: tail
+using Compat: @compat # for @compat public
 using Preferences
 using LinearAlgebra, Statistics, Random  # standard lib
 using MacroTools, Reexport, ProgressLogging, SpecialFunctions
@@ -30,6 +31,15 @@ export Chain, Dense, Embedding, Maxout, SkipConnection, Parallel, PairwiseFusion
   fmap, cpu, gpu, f32, f64, f16, rand32, randn32, zeros32, ones32,
   testmode!, trainmode!
 
+@compat(public, ( # mark unexported symbols as API, on Julia 1.11
+  # modules
+  Losses,
+  # layers
+  Bilinear, Scale, dropout,
+  # utils
+  outputsize, state,
+))
+
 include("optimise/Optimise.jl")
 using .Optimise
 export Descent, Adam, Momentum, Nesterov, RMSProp,
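
The practical effect of the block above, as an illustrative sketch rather than part of the commit: on Julia 1.11 or later these names are reported as public without being exported, so they are still called through the Flux prefix.

    using Flux

    # Assumes Julia >= 1.11, where the `public` keyword and `Base.ispublic` exist:
    Base.ispublic(Flux, :outputsize)           # true  -- marked via @compat public
    isdefined(Main, :outputsize)               # false -- `using Flux` still does not export it

    Flux.outputsize(Dense(10 => 3), (10, 1))   # (3, 1), reached via the module prefix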
@@ -47,6 +57,15 @@ using Adapt, Functors, OneHotArrays
 include("utils.jl")
 include("functor.jl")
 
+@compat(public, (
+  # from OneHotArrays.jl
+  onehot, onehotbatch, onecold,
+  # from Functors.jl
+  functor, @functor,
+  # from Optimise/Train/Optimisers.jl
+  setup, update!, destructure, freeze!, adjust!, params, trainable
+))
+
 # Pirate error to catch a common mistake.
 Functors.functor(::Type{<:MLUtils.DataLoader}, x) = error("`DataLoader` does not support Functors.jl, thus functions like `Flux.gpu` will not act on its contents.")
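
The names in this second block come from dependencies (OneHotArrays.jl, Functors.jl, Optimisers.jl) but are documented as Flux API under the Flux prefix; a small usage sketch under that assumption:

    using Flux

    model = Dense(2 => 1)
    opt_state = Flux.setup(Flux.Adam(0.01), model)   # public but unexported, hence Flux.setup

    x = Flux.onehotbatch([1, 2, 1], 1:2)             # one-hot encoding, from OneHotArrays.jl
    flat, re = Flux.destructure(model)               # flatten parameters and get a rebuilder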

@@ -69,5 +88,37 @@ include("deprecations.jl")
 include("losses/Losses.jl")
 using .Losses
 
+@compat(public, (
+  # init
+  glorot_uniform,
+  glorot_normal,
+  kaiming_uniform,
+  kaiming_normal,
+  truncated_normal,
+  orthogonal,
+  sparse_init,
+  identity_init,
+
+  # Losses
+  binary_focal_loss,
+  binarycrossentropy,
+  crossentropy,
+  dice_coeff_loss,
+  focal_loss,
+  hinge_loss,
+  huber_loss,
+  kldivergence,
+  label_smoothing,
+  logitbinarycrossentropy,
+  logitcrossentropy,
+  mae,
+  mse,
+  msle,
+  poisson_loss,
+  siamese_contrastive_loss,
+  squared_hinge_loss,
+  tversky_loss,
+))
+
 
 end # module
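
As a usage note rather than part of the diff: the initialisers and loss functions in this last block likewise stay unexported and are reached by qualification.

    using Flux

    W = Flux.glorot_uniform(3, 4)                 # 3x4 matrix of Glorot-initialised weights
    Flux.mse([0.5, 0.5], [1.0, 0.0])              # mean squared error, here 0.25
    Flux.logitcrossentropy([2.0, -1.0], [1, 0])   # cross-entropy computed from raw logits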
