# test_sum.jl: tests for the Sum calculus rule (from the JuliaFirstOrder/ProximalOperators.jl test suite)
using Test
using ProximalOperators
using Random
Random.seed!(123)

# predicates_test and gradient_test are helpers from the package's shared test
# utilities, assumed to be loaded (e.g. via runtests.jl) before this file runs.
# Smooth case: sum of two quadratic, strongly convex functions
f1 = SqrNormL2()
f2 = Translate(SqrNormL2(2.5), randn(10))
f = Sum(f1, f2)
predicates_test(f)
@test ProximalOperators.is_quadratic(f) == true
@test ProximalOperators.is_strongly_convex(f) == true
@test ProximalOperators.is_set(f) == false
xtest = randn(10)
result = f1(xtest) + f2(xtest)
@test f(xtest) ≈ result
# Gradient of the sum should equal the sum of the gradients
grad1, val1 = gradient_test(f1, xtest)
grad2, val2 = gradient_test(f2, xtest)
gradsum, valsum = gradient_test(f, xtest)
@test gradsum ≈ grad1 + grad2
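
# Extra sanity check (assumes gradient_test returns the function value as its
# second output, as the variable names above suggest): the value of the sum at
# xtest should equal the sum of the individual values.
@test valsum ≈ val1 + val2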
# Nonsmooth case: L2 norm plus a translated quadratic (strongly convex, but not smooth)
g1 = NormL2()
g2 = Translate(SqrNormL2(2.5), randn(10))
g = Sum(g1, g2)
predicates_test(g)
@test ProximalOperators.is_smooth(g) == false
@test ProximalOperators.is_strongly_convex(g) == true
@test ProximalOperators.is_set(g) == false
xtest = randn(10)
result = g1(xtest) + g2(xtest)
@test g(xtest) ≈ result
grad1, val1 = gradient_test(g1, xtest)
grad2, val2 = gradient_test(g2, xtest)
gradsum, valsum = gradient_test(g, xtest)
@test gradsum ≈ grad1 + grad2
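
# Same sanity check as in the smooth case (under the same assumption about
# gradient_test's return values).
@test valsum ≈ val1 + val2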