diff --git a/src/NNlib.jl b/src/NNlib.jl
index 60d707da2..853273ec6 100644
--- a/src/NNlib.jl
+++ b/src/NNlib.jl
@@ -2,13 +2,14 @@ module NNlib
 
 using Requires
 
-export σ, sigmoid, relu, leakyrelu, elu, swish, selu, softplus, softsign,
+export σ, sigmoid, relu, leakyrelu, elu, swish, selu, softplus, softsign, logσ, logsigmoid,
   softmax, logsoftmax, conv2d, maxpool2d, avgpool2d
 
 const libnnlib = Libdl.find_library("nnlib.$(Libdl.dlext)", [joinpath(@__DIR__, "..", "deps")])
 
 include("numeric.jl")
 include("activation.jl")
+include("logsigmoid.jl")
 include("softmax.jl")
 include("logsoftmax.jl")
 include("linalg.jl")
diff --git a/src/logsigmoid.jl b/src/logsigmoid.jl
new file mode 100644
index 000000000..3a6e4a550
--- /dev/null
+++ b/src/logsigmoid.jl
@@ -0,0 +1,24 @@
+
+"""
+    logσ(x)
+
+Return `log(σ(x))` which is computed in a numerically stable way.
+
+    julia> logσ(0.)
+    -0.6931471805599453
+    julia> logσ.([-100, -10, 100.])
+    3-element Array{Float64,1}:
+     -100.0
+      -10.0
+       -0.0
+"""
+function logσ(x)
+  max_v = max(zero(x), -x)
+  z = exp(-max_v) + exp(-x-max_v)
+  -(max_v + log(z))
+end
+
+∇logσ(Δ, x) = Δ * (1 - σ(x))
+
+const logsigmoid = logσ
+const ∇logsigmoid = ∇logσ
diff --git a/test/runtests.jl b/test/runtests.jl
index b35148cfd..dc60174e1 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -9,10 +9,16 @@ include("conv.jl")
 xs = rand(5)
 @test softmax(xs) ≈ exp.(xs) ./ sum(exp.(xs))
 @test logsoftmax(xs) ≈ log.(softmax(xs))
+@test logsigmoid.(xs) ≈ log.(sigmoid.(xs))
 
 xs = rand(5,10)
 @test softmax(xs) ≈ exp.(xs) ./ sum(exp.(xs),1)
 @test logsoftmax(xs) ≈ log.(softmax(xs))
+@test logsigmoid.(xs) ≈ log.(sigmoid.(xs))
+
+for T in [:Float32, :Float64]
+  @eval @test logsigmoid.($T[-100_000, 100_000.]) ≈ $T[-100_000, 0.]
+end
 
 ## compare the outputs with the PyTorch nn.LogSoftmax returns
 xs = Float32[1, 2, 3000.]
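
Note on the numerics (illustration only, not part of the patch): `logσ` uses the usual log-sum-exp shift, rewriting `log(σ(x)) = -log(1 + exp(-x))` as `-(m + log(exp(-m) + exp(-x - m)))` with `m = max(0, -x)`, so `exp` is never evaluated at a positive argument and cannot overflow. A quick REPL sketch of why the naive formula fails at the extremes the new test exercises; `naive_logsigmoid` is a throwaway helper defined here for comparison, not something provided by NNlib:

    julia> using NNlib

    julia> naive_logsigmoid(x) = log(1 / (1 + exp(-x)))  # σ(x) underflows to 0 for very negative x
    naive_logsigmoid (generic function with 1 method)

    julia> naive_logsigmoid(-1000.0), logσ(-1000.0)
    (-Inf, -1000.0)

    julia> naive_logsigmoid(0.0), logσ(0.0)
    (-0.6931471805599453, -0.6931471805599453)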