From e8bf08e58c5f4c2fd61ab4cbd8d4386ac325f35e Mon Sep 17 00:00:00 2001
From: Michael Abbott <32575566+mcabbott@users.noreply.github.com>
Date: Mon, 9 Oct 2023 17:34:44 -0400
Subject: [PATCH 1/3] first attempt to define public

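Julia 1.11 introduces a `public` keyword for marking un-exported names as
part of a package's official API. Since `public` does not parse as a
keyword on older Julia, build the declaration as an `Expr(:public, ...)`
and only evaluate it when `Base.ispublic` (also new in 1.11) exists.
A minimal sketch of the pattern, using two of the names marked below:

    # On Julia 1.11 this evaluates to `public outputsize, state`;
    # on older versions the `isdefined` check short-circuits and nothing happens.
    isdefined(Base, :ispublic) && eval(Expr(:public, :outputsize, :state))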
---
 src/Flux.jl | 54 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 54 insertions(+)

diff --git a/src/Flux.jl b/src/Flux.jl
index d522b91e78..03254d0196 100644
--- a/src/Flux.jl
+++ b/src/Flux.jl
@@ -30,6 +30,17 @@ export Chain, Dense, Embedding, Maxout, SkipConnection, Parallel, PairwiseFusion
        fmap, cpu, gpu, f32, f64, f16, rand32, randn32, zeros32, ones32,
        testmode!, trainmode!
 
+isdefined(Base, :ispublic) && eval(Expr(:public,
+  # modules
+  :Losses,
+
+  # layers -- unexported only!
+  :Bilinear, :Scale, :dropout,
+
+  # utils
+  :outputsize, :state,
+))
+
 include("optimise/Optimise.jl")
 using .Optimise
 export Descent, Adam, Momentum, Nesterov, RMSProp,
@@ -47,6 +58,17 @@ using Adapt, Functors, OneHotArrays
 include("utils.jl")
 include("functor.jl")
 
+isdefined(Base, :ispublic) && eval(Expr(:public,
+  # from Optimise/Train/Optimisers
+  :setup, :update!, :destructure, :freeze!, :adjust!, :params, :trainable,
+
+  # from OneHotArrays
+  :onehot, :onehotbatch, :onecold,
+
+  # from Functors
+  :functor, # Symbol("@functor"),
+))
+
 # Pirate error to catch a common mistake.
 Functors.functor(::Type{<:MLUtils.DataLoader}, x) = error("`DataLoader` does not support Functors.jl, thus functions like `Flux.gpu` will not act on its contents.")
 
@@ -69,5 +91,37 @@ include("deprecations.jl")
 include("losses/Losses.jl")
 using .Losses
 
+isdefined(Base, :ispublic) && eval(Expr(:public,
+  # init
+  :glorot_uniform,
+  :glorot_normal,
+  :kaiming_uniform,
+  :kaiming_normal,
+  :truncated_normal,
+  :orthogonal,
+  :sparse_init,
+  :identity_init,
+
+  # Losses
+  :binary_focal_loss,
+  :binarycrossentropy,
+  :crossentropy,
+  :dice_coeff_loss,
+  :focal_loss,
+  :hinge_loss,
+  :huber_loss,
+  :kldivergence,
+  :label_smoothing,
+  :logitbinarycrossentropy,
+  :logitcrossentropy,
+  :mae,
+  :mse,
+  :msle,
+  :poisson_loss,
+  :siamese_contrastive_loss,
+  :squared_hinge_loss,
+  :tversky_loss,
+))
+
 
 end # module

From 2b314a13c157e26124ed42c5fb312b02049a61ff Mon Sep 17 00:00:00 2001
From: Michael Abbott <32575566+mcabbott@users.noreply.github.com>
Date: Mon, 9 Oct 2023 18:21:14 -0400
Subject: [PATCH 2/3] using Compat.jl instead

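Compat.jl 4.10 provides the same feature behind a macro, so the
`isdefined`/`eval` guard is no longer needed. Roughly:

    using Compat: @compat
    # Expands to `public outputsize, state` on Julia 1.11+,
    # and to nothing on older releases.
    @compat public outputsize, state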
---
 Project.toml |  2 ++
 src/Flux.jl  | 80 +++++++++++++++++++++++-----------------------------
 2 files changed, 38 insertions(+), 44 deletions(-)

diff --git a/Project.toml b/Project.toml
index 85c0d678f4..4ce3460a50 100644
--- a/Project.toml
+++ b/Project.toml
@@ -5,6 +5,7 @@ version = "0.14.6"
 [deps]
 Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
+Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
 Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54"
@@ -38,6 +39,7 @@ AMDGPU = "0.5, 0.6"
 Adapt = "3.0"
 CUDA = "4, 5"
 ChainRulesCore = "1.12"
+Compat = "4.10.0"
 Functors = "0.4"
 MLUtils = "0.4"
 MacroTools = "0.5"
diff --git a/src/Flux.jl b/src/Flux.jl
index 03254d0196..beb3e12913 100644
--- a/src/Flux.jl
+++ b/src/Flux.jl
@@ -1,6 +1,7 @@
 module Flux
 
 using Base: tail
+using Compat: @compat # for @compat public
 using Preferences
 using LinearAlgebra, Statistics, Random  # standard lib
 using MacroTools, Reexport, ProgressLogging, SpecialFunctions
@@ -30,15 +31,13 @@ export Chain, Dense, Embedding, Maxout, SkipConnection, Parallel, PairwiseFusion
        fmap, cpu, gpu, f32, f64, f16, rand32, randn32, zeros32, ones32,
        testmode!, trainmode!
 
-isdefined(Base, :ispublic) && eval(Expr(:public,
+@compat(public, ( # mark unexported symbols as API, on Julia 1.11
   # modules
-  :Losses,
-
-  # layers -- unexported only!
-  :Bilinear, :Scale, :dropout,
-
+  Losses,
+  # layers
+  Bilinear, Scale, dropout,
   # utils
-  :outputsize, :state,
+  outputsize, state,
 ))
 
 include("optimise/Optimise.jl")
@@ -58,16 +57,9 @@ using Adapt, Functors, OneHotArrays
 include("utils.jl")
 include("functor.jl")
 
-isdefined(Base, :ispublic) && eval(Expr(:public,
-  # from Optimise/Train/Optimisers
-  :setup, :update!, :destructure, :freeze!, :adjust!, :params, :trainable,
-
-  # from OneHotArrays
-  :onehot, :onehotbatch, :onecold,
-
-  # from Functors
-  :functor, # Symbol("@functor"),
-))
+@compat public onehot, onehotbatch, onecold, # from OneHotArrays  
+  functor, @functor, # from Functors
+  setup, update!, destructure, freeze!, adjust!, params, trainable # from Optimise/Train/Optimisers  
 
 # Pirate error to catch a common mistake.
 Functors.functor(::Type{<:MLUtils.DataLoader}, x) = error("`DataLoader` does not support Functors.jl, thus functions like `Flux.gpu` will not act on its contents.")
@@ -91,36 +83,36 @@ include("deprecations.jl")
 include("losses/Losses.jl")
 using .Losses
 
-isdefined(Base, :ispublic) && eval(Expr(:public,
+@compat(public, (
   # init
-  :glorot_uniform,
-  :glorot_normal,
-  :kaiming_uniform,
-  :kaiming_normal,
-  :truncated_normal,
-  :orthogonal,
-  :sparse_init,
-  :identity_init,
+  glorot_uniform,
+  glorot_normal,
+  kaiming_uniform,
+  kaiming_normal,
+  truncated_normal,
+  orthogonal,
+  sparse_init,
+  identity_init,
 
   # Losses
-  :binary_focal_loss,
-  :binarycrossentropy,
-  :crossentropy,
-  :dice_coeff_loss,
-  :focal_loss,
-  :hinge_loss,
-  :huber_loss,
-  :kldivergence,
-  :label_smoothing,
-  :logitbinarycrossentropy,
-  :logitcrossentropy,
-  :mae,
-  :mse,
-  :msle,
-  :poisson_loss,
-  :siamese_contrastive_loss,
-  :squared_hinge_loss,
-  :tversky_loss,
+  binary_focal_loss,
+  binarycrossentropy,
+  crossentropy,
+  dice_coeff_loss,
+  focal_loss,
+  hinge_loss,
+  huber_loss,
+  kldivergence,
+  label_smoothing,
+  logitbinarycrossentropy,
+  logitcrossentropy,
+  mae,
+  mse,
+  msle,
+  poisson_loss,
+  siamese_contrastive_loss,
+  squared_hinge_loss,
+  tversky_loss,
 ))
 
 

From 7e761ef625c023ab22d2d174ea1ef83883e87292 Mon Sep 17 00:00:00 2001
From: Michael Abbott <32575566+mcabbott@users.noreply.github.com>
Date: Tue, 28 Nov 2023 11:34:24 -0500
Subject: [PATCH 3/3] change all to bracket style

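Use the call form `@compat(public, (...))` for this block too, matching
the other two declarations. Keeping the whole name list inside one
parenthesised tuple also means the macro sees every line, where the
juxtaposed form relies on each line ending in a trailing comma to
continue. Sketch of the style used throughout after this patch:

    @compat(public, (
      # one group per line, end-of-line comments allowed
      onehot, onecold,
      setup, update!,
    ))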
---
 src/Flux.jl | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/src/Flux.jl b/src/Flux.jl
index beb3e12913..d158771ef8 100644
--- a/src/Flux.jl
+++ b/src/Flux.jl
@@ -57,9 +57,14 @@ using Adapt, Functors, OneHotArrays
 include("utils.jl")
 include("functor.jl")
 
-@compat public onehot, onehotbatch, onecold, # from OneHotArrays  
-  functor, @functor, # from Functors
-  setup, update!, destructure, freeze!, adjust!, params, trainable # from Optimise/Train/Optimisers  
+@compat(public, (
+  # from OneHotArrays.jl
+  onehot, onehotbatch, onecold,
+  # from Functors.jl
+  functor, @functor,
+  # from Optimise/Train/Optimisers.jl
+  setup, update!, destructure, freeze!, adjust!, params, trainable
+))
 
 # Pirate error to catch a common mistake.
 Functors.functor(::Type{<:MLUtils.DataLoader}, x) = error("`DataLoader` does not support Functors.jl, thus functions like `Flux.gpu` will not act on its contents.")