
Commit e54440b

more tests
1 parent 61c583c commit e54440b

2 files changed: +9 -37 lines

Project.toml

-2 lines

@@ -7,8 +7,6 @@ AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
 Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
 ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
 CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
-ChainRules = "082447d4-558c-5d27-93f4-14fc19e9eca2"
-ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 CodecZlib = "944b1d66-785c-5afd-91f1-9de20f533193"
 Colors = "5ae59095-9a9b-59fe-a467-6f913c188581"
 DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"

test/cuda/layers.jl

+9 -35 lines

@@ -114,6 +114,15 @@ pixelshuffle = [PixelShuffle]
 gpu_gradtest("PixelShuffle 2d", pixelshuffle, rand(Float32, 3, 4, 18, 3), 3)
 gpu_gradtest("PixelShuffle 1d", pixelshuffle, rand(Float32, 3, 18, 3), 3)
 
+embedding = [Embedding]
+gpu_gradtest("Embedding", embedding, [1,3,5], 5, 2)
+gpu_gradtest("Embedding repeated indices", embedding, [1,3,5,3], 5, 2)
+gpu_gradtest("Embedding integer index", embedding, 1, 5, 2)
+gpu_gradtest("Embedding 2d index", embedding, [1 2; 3 4], 5, 2)
+gpu_gradtest("Embedding OneHotVec index", embedding, OneHotVector(1, 5), 5, 2)
+gpu_gradtest("Embedding OneHotMatrix index", embedding, OneHotMatrix([1,2,3], 5), 5, 2)
+gpu_gradtest("Embedding OneHotMatrix repeated indices", OneHotMatrix([1,2,2], 5), 5, 2)
+
 @testset "function layers" begin
   x = rand(Float32, 3,3)
   gpu_autodiff_test(x -> sum(Flux.normalise(x; dims=1)), x)
@@ -259,38 +268,3 @@ end
   end
 end
 end
-
-@testset "Embedding" begin
-  vocab_size, embed_size = 5, 2
-  m = Flux.Embedding(vocab_size, embed_size)
-
-  x = [1, 3, 5]
-  y = m(x)
-  m_g = m |> gpu
-  x_g = x |> gpu
-  y_g = m_g(x_g)
-  @test collect(y_g) == y
-
-  gs = gradient(() -> sum(m(x)), params(m))
-  gs_g = gradient(() -> sum(m_g(x_g)), params(m_g))
-  @test collect(gs_g[m_g.weight]) ≈ gs[m.weight]
-
-  gs = gradient(() -> sum(tanh.(m(x))), params(m))
-  gs_g = gradient(() -> sum(tanh.(m_g(x_g))), params(m_g))
-  @test collect(gs_g[m_g.weight]) ≈ gs[m.weight]
-
-  @testset "repeated indices" begin
-    vocab_size, embed_size = 5, 2
-    m = Flux.Embedding(vocab_size, embed_size)
-
-    x = [1, 3, 5, 3] # repeated indexes
-    y = m(x)
-    m_g = m |> gpu
-    x_g = x |> gpu
-    y_g = m_g(x_g)
-    @test Array(y_g) == y
-    gs = gradient(() -> sum(m(x)), params(m))
-    gs_g = gradient(() -> sum(m_g(x_g)), params(m_g))
-    @test Array(gs_g[m_g.weight]) ≈ gs[m.weight]
-  end
-end
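For reference, the deleted @testset shows the CPU-vs-GPU comparison that the consolidated gpu_gradtest calls are meant to cover for Embedding: run the layer on the CPU and on the GPU, then compare outputs and gradients of the embedding table. A minimal standalone sketch of that pattern, assuming a CUDA-capable device (the actual gpu_gradtest helper in the test suite may differ in detail):

using Flux, CUDA, Test

vocab_size, embed_size = 5, 2
m = Flux.Embedding(vocab_size, embed_size)
x = [1, 3, 5, 3]                     # integer indices, including a repeat

y = m(x)                             # forward pass on the CPU
m_g, x_g = m |> gpu, x |> gpu        # move layer and input to the GPU
y_g = m_g(x_g)                       # forward pass on the GPU
@test collect(y_g) == y              # outputs should match

# Gradients of a scalar loss w.r.t. the embedding table should agree too
gs   = gradient(() -> sum(m(x)), params(m))
gs_g = gradient(() -> sum(m_g(x_g)), params(m_g))
@test collect(gs_g[m_g.weight]) ≈ gs[m.weight]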
