-using LIBSVM, Test
 using DelimitedFiles
+using LIBSVM
 using SparseArrays
+using Test
 
-iris = readdlm(joinpath(dirname(@__FILE__), "iris.csv"), ',')
-labels = iris[:, 5]
-instances = convert(Matrix{Float64}, iris[:, 1:4]')
-model = svmtrain(instances[:, 1:2:end], labels[1:2:end]; verbose=true)
-GC.gc()
-(class, decvalues) = svmpredict(model, instances[:, 2:2:end])
-correct = Bool[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1]
-@test (class .== labels[2:2:end]) == correct
+@testset "LibSVM" begin
 
-skmodel = fit!(SVC(), instances[:,1:2:end]', labels[1:2:end])
-skclass = predict(skmodel, instances[:, 2:2:end]')
-@test skclass == class
 
-model = svmtrain(sparse(instances[:, 1:2:end]), labels[1:2:end]; verbose=true)
-GC.gc()
-(class, decvalues) = svmpredict(model, sparse(instances[:, 2:2:end]))
-correct = Bool[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1]
-@test (class .== labels[2:2:end]) == correct
+@testset "IRIS" begin
+    iris = readdlm(joinpath(@__DIR__, "iris.csv"), ',')
+    labels = iris[:, 5]
 
-#Regression tests, results confirmed using e1071 R-package
-#whiteside = RDatasets.dataset("MASS", "whiteside")
-whiteside, hdr = readdlm(joinpath(dirname(@__FILE__), "whiteside.csv"), ',', header=true)
-ws = convert(Matrix{Float64}, whiteside[:,2:3])
-X = Array{Float64, 2}(ws[:, 2]')
-y = ws[:, 1]
+    instances = convert(Matrix{Float64}, iris[:, 1:4]')
+    model = svmtrain(instances[:, 1:2:end], labels[1:2:end]; verbose = true)
+    GC.gc()
+    (class, decvalues) = svmpredict(model, instances[:, 2:2:end])
+    correct = Bool[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1]
+    @test (class .== labels[2:2:end]) == correct
 
-m = svmtrain(X, y, svmtype = EpsilonSVR, cost = 10., gamma = 1.)
-yeps, d = svmpredict(m, X)
-@test sum(yeps - y) ≈ 7.455509045783046
-skm = fit!(EpsilonSVR(cost = 10., gamma = 1.), X', y)
-ysk = predict(skm, X')
-@test isapprox(yeps,ysk)
+    skmodel = fit!(SVC(), instances[:,1:2:end]', labels[1:2:end])
+    skclass = predict(skmodel, instances[:, 2:2:end]')
+    @test skclass == class
+    model = svmtrain(sparse(instances[:, 1:2:end]), labels[1:2:end]; verbose=true)
+    GC.gc()
+    (class, decvalues) = svmpredict(model, sparse(instances[:, 2:2:end]))
+    correct = Bool[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1]
+    @test (class .== labels[2:2:end]) == correct
+end
 
-nu1 = svmtrain(X, y, svmtype = NuSVR, cost = 10.,
-               nu = .7, gamma = 2., tolerance = .001)
-ynu1, d = svmpredict(nu1, X)
-@test sum(ynu1 - y) ≈ 14.184665717092
-sknu1 = fit!(NuSVR(cost = 10., nu=.7, gamma = 2.), X', y)
-ysknu1 = predict(sknu1, X')
-@test isapprox(ysknu1,ynu1)
 
-nu2 = svmtrain(X, y, svmtype = NuSVR, cost = 10., nu = .9)
-ynu2, d =svmpredict(nu2, X)
-@test sum(ynu2 - y) ≈ 6.686819661799177
-sknu2 = fit!(NuSVR(cost = 10., nu=.9), X', y)
-ysknu2 = predict(sknu2, X')
-@test isapprox(ysknu2,ynu2)
+@testset "Whiteside" begin
+    #Regression tests, results confirmed using e1071 R-package
+    #whiteside = RDatasets.dataset("MASS", "whiteside")
+    whiteside, hdr = readdlm(joinpath(@__DIR__, "whiteside.csv"), ',', header=true)
+    ws = convert(Matrix{Float64}, whiteside[:,2:3])
+    X = Array{Float64, 2}(ws[:, 2]')
+    y = ws[:, 1]
 
-# Multithreading testing
+    m = svmtrain(X, y, svmtype = EpsilonSVR, cost = 10., gamma = 1.)
+    yeps, d = svmpredict(m, X)
+    @test sum(yeps - y) ≈ 7.455509045783046
+    skm = fit!(EpsilonSVR(cost = 10., gamma = 1.), X', y)
+    ysk = predict(skm, X')
+    @test isapprox(yeps,ysk)
 
-# Assign by maximum number of threads
-ntnu1 = svmtrain(X, y, svmtype = NuSVR, cost = 10.,
-                 nu = .7, gamma = 2., tolerance = .001,
-                 nt = -1)
-ntynu1, ntd = svmpredict(ntnu1, X)
-@test sum(ntynu1 - y) ≈ 14.184665717092
+    nu1 = svmtrain(X, y, svmtype = NuSVR, cost = 10.,
+                   nu = .7, gamma = 2., tolerance = .001)
+    ynu1, d = svmpredict(nu1, X)
+    @test sum(ynu1 - y) ≈ 14.184665717092
+    sknu1 = fit!(NuSVR(cost = 10., nu=.7, gamma = 2.), X', y)
+    ysknu1 = predict(sknu1, X')
+    @test isapprox(ysknu1,ynu1)
 
-# Assign by environment
-ENV["OMP_NUM_THREADS"] = 2
+    nu2 = svmtrain(X, y, svmtype = NuSVR, cost = 10., nu = .9)
+    ynu2, d =svmpredict(nu2, X)
+    @test sum(ynu2 - y) ≈ 6.686819661799177
+    sknu2 = fit!(NuSVR(cost = 10., nu=.9), X', y)
+    ysknu2 = predict(sknu2, X')
+    @test isapprox(ysknu2, ynu2)
 
-ntm = svmtrain(X, y, svmtype = EpsilonSVR, cost = 10., gamma = 1.)
-ntyeps, ntd = svmpredict(m, X)
-@test sum(ntyeps - y) ≈ 7.455509045783046
+    @testset "Multithreading" begin
+        # Assign by maximum number of threads
+        ntnu1 = svmtrain(X, y, svmtype = NuSVR, cost = 10.,
+                         nu = .7, gamma = 2., tolerance = .001,
+                         nt = -1)
+        ntynu1, ntd = svmpredict(ntnu1, X)
+        @test sum(ntynu1 - y) ≈ 14.184665717092
+
+        # Assign by environment
+        ENV["OMP_NUM_THREADS"] = 2
+
+        ntm = svmtrain(X, y, svmtype = EpsilonSVR, cost = 10., gamma = 1.)
+        ntyeps, ntd = svmpredict(m, X)
+        @test sum(ntyeps - y) ≈ 7.455509045783046
+    end
+end
+
+
+end # @testset "LIBSVM"