Skip to content

Commit eed0bdd

Browse files
Fix tests
1 parent 70648df commit eed0bdd

File tree

1 file changed

+38
-23
lines changed

1 file changed

+38
-23
lines changed

test/runtests.jl

+38-23
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,9 @@ using Test
66
A = rand(5,5,5,5)
77
@test vvmapreduce(abs2, +, A, dims=(1,3)) ≈ mapreduce(abs2, +, A, dims=(1,3))
88
@test vvmapreduce(cos, *, A, dims=(2,4)) ≈ mapreduce(cos, *, A, dims=(2,4))
9-
@test vvprod(log, A1, dims=1) ≈ prod(log, A1, dims=1)
10-
@test vvminimum(sin, A1, dims=(3,4)) ≈ minimum(sin, A1, dims=(3,4))
11-
@test vvmaximum(sin, A1, dims=(3,4)) ≈ maximum(sin, A1, dims=(3,4))
9+
@test vvprod(log, A, dims=1) ≈ prod(log, A, dims=1)
10+
@test vvminimum(sin, A, dims=(3,4)) ≈ minimum(sin, A, dims=(3,4))
11+
@test vvmaximum(sin, A, dims=(3,4)) ≈ maximum(sin, A, dims=(3,4))
1212
end
1313
@testset "vvmapreduce_vararg" begin
1414
A1 = rand(5,5,5,5)
@@ -28,35 +28,47 @@ using Test
2828
@test r vvmapreduce(*, +, A1, A2, A3, dims=:, init=0)
2929
@test r vvmapreduce(*, +, A1, A2, A3, dims=:, init=zero)
3030
@test r vvmapreduce(*, +, as)
31-
# And for really strange stuff (e.g. posterior predictive transformations)
32-
@benchmark vvmapreduce((x,y,z) -> ifelse(x*y+z ≥ 1, 1, 0), +, A1, A2, A3)
33-
@benchmark vvmapreduce((x,y,z) -> ifelse(x*y+z ≥ 1, 1, 0), +, A1, A2, A3, dims=(2,3,4))
34-
# using ifelse for just a boolean is quite slow, but the above is just for demonstration
35-
@benchmark vvmapreduce(≥, +, A1, A2)
36-
@benchmark vvmapreduce((x,y,z) -> ≥(x*y+z, 1), +, A1, A2, A3)
37-
@benchmark vvmapreduce((x,y,z) -> ≥(x*y+z, 1), +, A1, A2, A3, dims=(2,3,4))
38-
@benchmark mapreduce((x,y,z) -> ≥(x*y+z, 1), +, A1, A2, A3)
39-
# What I mean by posterior predictive transformation? Well, one might encounter
40-
# this in Bayesian model checking, which provides a convenient example.
41-
# If one wishes to compute the Pr = ∫∫𝕀(T(yʳᵉᵖ, θ) ≥ T(y, θ))p(yʳᵉᵖ|θ)p(θ|y)dyʳᵉᵖdθ
42-
# Let's imagine that A1 represents T(yʳᵉᵖ, θ) and A2 represents T(y, θ)
43-
# i.e. the test variable samples computed as a functional of the Markov chain (samples of θ)
31+
# # And for really strange stuff (e.g. posterior predictive transformations)
32+
# @benchmark vvmapreduce((x,y,z) -> ifelse(x*y+z ≥ 1, 1, 0), +, A1, A2, A3)
33+
# @benchmark vvmapreduce((x,y,z) -> ifelse(x*y+z ≥ 1, 1, 0), +, A1, A2, A3, dims=(2,3,4))
34+
# # using ifelse for just a boolean is quite slow, but the above is just for demonstration
35+
# @benchmark vvmapreduce(≥, +, A1, A2)
36+
# @benchmark vvmapreduce((x,y,z) -> ≥(x*y+z, 1), +, A1, A2, A3)
37+
# @benchmark vvmapreduce((x,y,z) -> ≥(x*y+z, 1), +, A1, A2, A3, dims=(2,3,4))
38+
# @benchmark mapreduce((x,y,z) -> ≥(x*y+z, 1), +, A1, A2, A3)
39+
# # What I mean by posterior predictive transformation? Well, one might encounter
40+
# # this in Bayesian model checking, which provides a convenient example.
41+
# # If one wishes to compute the Pr = ∫∫𝕀(T(yʳᵉᵖ, θ) ≥ T(y, θ))p(yʳᵉᵖ|θ)p(θ|y)dyʳᵉᵖdθ
42+
# # Let's imagine that A1 represents T(yʳᵉᵖ, θ) and A2 represents T(y, θ)
43+
# # i.e. the test variable samples computed as a functional of the Markov chain (samples of θ)
4444
# Then, Pr is computed as
4545
vvmapreduce(≥, +, A1, A2) / length(A1)
4646
# Or, if only the probability is of interest, and we do not wish to use the functionals
4747
# for any other purpose, we could compute it as:
48-
vvmapreduce((x, y) -> ≥(f(x), f(y)), +, A1, A2)
48+
# vvmapreduce((x, y) -> ≥(f(x), f(y)), +, A1, A2)
4949
# where `f` is the functional of interest, e.g.
50-
@benchmark vvmapreduce((x, y) -> ≥(abs2(x), abs2(y)), +, A1, A2)
51-
@benchmark vvmapreduce((x, y) -> ≥(abs2(x), abs2(y)), +, A1, A2, dims=(2,3,4))
50+
f(x, y) = ≥(abs2(x), abs2(y))
51+
# r = mapreduce((x, y) -> ≥(abs2(x), abs2(y)), +, A1, A2)
52+
r = mapreduce(f, +, A1, A2)
53+
# @test r ≈ vvmapreduce((x, y) -> ≥(abs2(x), abs2(y)), +, A1, A2)
54+
@test r ≈ vvmapreduce(f, +, A1, A2)
55+
# R = mapreduce((x, y) -> ≥(abs2(x), abs2(y)), +, A1, A2, dims=(2,3,4))
56+
# @test R ≈ vvmapreduce((x, y) -> ≥(abs2(x), abs2(y)), +, A1, A2, dims=(2,3,4))
57+
R = mapreduce(f, +, A1, A2, dims=(2,3,4))
58+
@test R ≈ vvmapreduce(f, +, A1, A2, dims=(2,3,4))
5259
# One can also express commonly encountered reductions with ease;
5360
# these will be fused once a post-reduction operator can be specified
5461
# MSE
55-
B = vvmapreduce((x, y) -> abs2(x - y), +, A1, A2, dims=(2,4)) ./ (size(A1, 2) * size(A1, 4) )
56-
@test B ≈ mapreduce((x, y) -> abs2(x - y), +, A1, A2, dims=(2,4)) ./ (size(A1, 2) * size(A1, 4) )
62+
sqdiff(x, y) = abs2(x -y)
63+
# B = vvmapreduce((x, y) -> abs2(x - y), +, A1, A2, dims=(2,4)) ./ (size(A1, 2) * size(A1, 4) )
64+
# @test B ≈ mapreduce((x, y) -> abs2(x - y), +, A1, A2, dims=(2,4)) ./ (size(A1, 2) * size(A1, 4) )
65+
B = vvmapreduce(sqdiff, +, A1, A2, dims=(2,4)) ./ (size(A1, 2) * size(A1, 4))
66+
@test B ≈ mapreduce(sqdiff, +, A1, A2, dims=(2,4)) ./ (size(A1, 2) * size(A1, 4))
5767
# Euclidean distance
58-
B = (√).(vvmapreduce((x, y) -> abs2(x - y), +, A1, A2, dims=(2,4)))
59-
@test B ≈ (√).(mapreduce((x, y) -> abs2(x - y), +, A1, A2, dims=(2,4)))
68+
# B = (√).(vvmapreduce((x, y) -> abs2(x - y), +, A1, A2, dims=(2,4)))
69+
# @test B ≈ (√).(mapreduce((x, y) -> abs2(x - y), +, A1, A2, dims=(2,4)))
70+
B = (√).(vvmapreduce(sqdiff, +, A1, A2, dims=(2,4)))
71+
@test B ≈ (√).(mapreduce(sqdiff, +, A1, A2, dims=(2,4)))
6072
end
6173
@testset "vfindminmax" begin
6274
# Simple
@@ -87,6 +99,9 @@ using Test
8799
@test ind1 == ind2 && val1 ≈ val2
88100
end
89101
@testset "vfindminmax_vararg" begin
102+
A1 = rand(5,5)
103+
A2 = rand(5,5)
104+
A3 = rand(5,5)
90105
A′ = @. A1 * A2 + A3;
91106
@test findmin(A′) == vfindmin((x, y, z) -> x * y + z, A1, A2, A3)
92107
val1, ind1 = findmin(A′, dims=2)

0 commit comments

Comments
 (0)