module Flux

using Base: tail
+ using Compat: @compat # for @compat public
using Preferences
using LinearAlgebra, Statistics, Random # standard lib
using MacroTools, Reexport, ProgressLogging, SpecialFunctions
@@ -30,6 +31,15 @@ export Chain, Dense, Embedding, Maxout, SkipConnection, Parallel, PairwiseFusion
       fmap, cpu, gpu, f32, f64, f16, rand32, randn32, zeros32, ones32,
       testmode!, trainmode!

+ @compat(public, ( # mark unexported symbols as API, on Julia 1.11
+   # modules
+   Losses,
+   # layers
+   Bilinear, Scale, dropout,
+   # utils
+   outputsize, state,
+ ))
+
include("optimise/Optimise.jl")
using .Optimise
export Descent, Adam, Momentum, Nesterov, RMSProp,
@@ -47,6 +57,15 @@ using Adapt, Functors, OneHotArrays
4757include (" utils.jl" )
4858include (" functor.jl" )
4959
60+ @compat (public, (
61+ # from OneHotArrays.jl
62+ onehot, onehotbatch, onecold,
63+ # from Functors.jl
64+ functor, @functor ,
65+ # from Optimise/Train/Optimisers.jl
66+ setup, update!, destructure, freeze!, adjust!, params, trainable
67+ ))
68+
5069# Pirate error to catch a common mistake.
5170Functors. functor (:: Type{<:MLUtils.DataLoader} , x) = error (" `DataLoader` does not support Functors.jl, thus functions like `Flux.gpu` will not act on its contents." )
5271
@@ -69,5 +88,37 @@ include("deprecations.jl")
6988include (" losses/Losses.jl" )
7089using . Losses
7190
91+ @compat (public, (
92+ # init
93+ glorot_uniform,
94+ glorot_normal,
95+ kaiming_uniform,
96+ kaiming_normal,
97+ truncated_normal,
98+ orthogonal,
99+ sparse_init,
100+ identity_init,
101+
102+ # Losses
103+ binary_focal_loss,
104+ binarycrossentropy,
105+ crossentropy,
106+ dice_coeff_loss,
107+ focal_loss,
108+ hinge_loss,
109+ huber_loss,
110+ kldivergence,
111+ label_smoothing,
112+ logitbinarycrossentropy,
113+ logitcrossentropy,
114+ mae,
115+ mse,
116+ msle,
117+ poisson_loss,
118+ siamese_contrastive_loss,
119+ squared_hinge_loss,
120+ tversky_loss,
121+ ))
122+
72123
73124end # module
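
A minimal usage sketch, not part of this commit: Compat.jl's @compat(public, (...)) lowers to the `public` declaration on Julia 1.11 and later and is a no-op on earlier versions, so the listed symbols become part of the declared API without being exported. Assuming Julia 1.11 or later, where Base.ispublic exists, the effect can be checked at the REPL:

    julia> using Flux

    julia> Base.ispublic(Flux, :outputsize)  # marked public above, but still unexported
    true

    julia> Base.ispublic(Flux, :state)
    true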