Merge pull request #25 from GiggleLiu/ulog
logarithmic number system
GiggleLiu authored Jul 30, 2020
2 parents 51a9722 + 8406495 commit 2b5b80f
Showing 19 changed files with 373 additions and 6 deletions.
7 changes: 5 additions & 2 deletions Project.toml
@@ -6,6 +6,7 @@ version = "0.7.1"
[deps]
FixedPointNumbers = "53c48c17-4a7d-5ca2-90c5-79b7896eea93"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
LogarithmicNumbers = "aa2f6b4e-9042-5d33-9679-40d3a6b85899"
MatchCore = "5dd3f0b1-72a9-48ad-ae6e-79f673da005f"
NiLangCore = "575d3204-02a4-11ea-3f62-238caa8bf11e"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
@@ -14,17 +15,19 @@ TupleTools = "9d95972d-f1c8-5527-a6e0-b4b365fa01f6"

[compat]
FixedPointNumbers = "0.6"
LogarithmicNumbers = "0.4"
MatchCore = "0.1"
NiLangCore = "0.7"
Reexport = "0.2"
TupleTools = "1.2"
julia = "1.3,1.4"

[extras]
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"

[targets]
test = ["Test", "Random", "Statistics", "Distributions"]
test = ["Test", "Random", "Statistics", "Distributions", "ForwardDiff"]
1 change: 1 addition & 0 deletions README.md
@@ -12,6 +12,7 @@ NiLang features:
* any program written in NiLang is differentiable,
* a reversible language with abstraction and arrays,
* complex values
* reversible logarithmic number system

[![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://GiggleLiu.github.io/NiLang.jl/stable)
[![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://GiggleLiu.github.io/NiLang.jl/dev)
3 changes: 2 additions & 1 deletion docs/make.jl
@@ -4,7 +4,7 @@ using SparseArrays
using Literate
tutorialpath = joinpath(@__DIR__, "src/examples")
sourcepath = joinpath(@__DIR__, "../examples")
for jlfile in ["besselj.jl", "sparse.jl", "sharedwrite.jl", "qr.jl", "port_zygote.jl", "fib.jl", "unitary.jl", "nice.jl", "realnvp.jl", "boxmuller.jl"]
for jlfile in ["besselj.jl", "sparse.jl", "sharedwrite.jl", "qr.jl", "port_zygote.jl", "fib.jl", "unitary.jl", "nice.jl", "realnvp.jl", "boxmuller.jl", "lognumber.jl"]
Literate.markdown(joinpath(sourcepath, jlfile), tutorialpath)
end

@@ -22,6 +22,7 @@ makedocs(;
"examples/fib.md",
"examples/besselj.md",
"examples/sparse.md",
"examples/lognumber.md",
"examples/unitary.md",
"examples/qr.md",
"examples/nice.md",
1 change: 1 addition & 0 deletions docs/src/index.md
@@ -25,6 +25,7 @@ Pages = [
"examples/fib.md",
"examples/besselj.md",
"examples/sparse.md",
"examples/lognumber.md",
"examples/unitary.md",
"examples/qr.md",
"examples/nice.md",
36 changes: 36 additions & 0 deletions examples/fixedlog.jl
@@ -0,0 +1,36 @@
"""
## Reference
[1] C. S. Turner, "A Fast Binary Logarithm Algorithm", IEEE Signal
Processing Mag., pp. 124,140, Sep. 2010.
"""
function log2fix(x::Fixed{T, P}) where {T, P}
PREC = UInt(P)
x.i == 0 && return typemin(T) # represents negative infinity

y = zero(T)
xi = unsigned(x.i)
while xi < UInt(1) << PREC
xi <<= UInt(1)
y -= 1 << PREC
end

while xi >= UInt(2) << PREC
xi >>= UInt(1)
y += 1 << PREC
end

z = Int128(xi)
b = 1 << (PREC - UInt(1))
for i = 1:P
z = (z * z) >> PREC
if z >= 2 << PREC
z >>= UInt(1)
y += b
end
b >>= UInt(1)
end

return Fixed{T,PREC}(y, nothing)
end

@test log2fix(Fixed43(2^1.24)) ≈ 1.24
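
The two `while` loops above shift the input into [1, 2) and record the integer part of the logarithm; the `for` loop then extracts one fractional bit per iteration by squaring the normalized value and renormalizing whenever it crosses 2 (the example file assumes `Fixed`, `Fixed43`, and `Test` are in scope). A rough floating-point sketch of the same bit-extraction idea, illustrative only and not part of this commit (`log2_bits` is a made-up name):

function log2_bits(x::Float64, nbits::Int)
    e = 0
    while x < 1;  x *= 2; e -= 1; end   # normalize x into [1, 2)
    while x >= 2; x /= 2; e += 1; end
    y = float(e)                        # integer part of log2(x)
    b = 0.5
    for _ in 1:nbits
        x *= x                          # squaring doubles the remaining exponent
        if x >= 2
            x /= 2                      # the next fractional bit is 1
            y += b
        end
        b /= 2
    end
    return y
end

log2_bits(2^1.24, 30)                   # ≈ 1.24, accurate to about 1e-9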
68 changes: 68 additions & 0 deletions examples/lognumber.jl
@@ -0,0 +1,68 @@
# # Logarithmic number system

# Computing basic functions like `power`, `exp` and `besselj` is not trivial in reversible programming.
# There is no efficient constant-memory algorithm that uses fixed-point numbers only.
# For example, to compute `x ^ n` reversibly with fixed-point numbers,
# we need to allocate a vector of size $O(n)$.
# With logarithmic numbers, the computation becomes straightforward:
# multiplication turns into addition of the stored logarithms, which is exactly invertible.

using LogarithmicNumbers
using NiLang, NiLang.AD
using FixedPointNumbers

@i function i_power(y::T, x::T, n::Int) where T
@routine begin
lx ← one(ULogarithmic{T})
ly ← one(ULogarithmic{T})
## convert `x` to a logarithmic number
## Here, `*=` is reversible for log numbers
lx *= convert(x)
for i=1:n
ly *= lx
end
end

## convert back to fixed point numbers
y += convert(ly)

~@routine
end

# To check the function
i_power(Fixed43(0.0), Fixed43(0.4), 3)

# ## `exp` function as an example
# The following example computes `exp(x)`.

@i function i_exp(y!::T, x::T) where T<:Union{Fixed, GVar{<:Fixed}}
@invcheckoff begin
@routine begin
s ← one(ULogarithmic{T})
lx ← one(ULogarithmic{T})
k ← 0
end
lx *= convert(x)
y! += convert(s)
while (s.log > -20, k != 0)
k += 1
s *= lx / k
y! += convert(s)
end
~(while (s.log > -20, k != 0)
k += 1
s *= x / k
end)
lx /= convert(x)
~@routine
end
end

x = Fixed43(3.5)

# We can check that the result is correct
out, _ = i_exp(Fixed43(0.0), x)
@assert out ≈ exp(3.5)

# Computing the gradients
_, gx = NiLang.AD.gradient(Val(1), i_exp, (Fixed43(0.0), x))
@assert gx ≈ exp(3.5)
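
The same point can be made with LogarithmicNumbers alone, outside NiLang: raising to a power only ever adds the stored logarithm to an accumulator, so every step has an exact inverse. A minimal sketch, illustrative and not part of the commit (`plain_power_log` is a made-up name):

using LogarithmicNumbers

function plain_power_log(x::Float64, n::Int)
    lx = exp(ULogarithmic, log(x))   # log-domain representation of x
    ly = exp(ULogarithmic, 0.0)      # log-domain 1
    for _ in 1:n
        ly *= lx                     # adds lx.log to ly.log; the inverse is subtraction
    end
    return exp(ly.log)               # back to an ordinary Float64
end

plain_power_log(0.4, 3)              # ≈ 0.4^3 = 0.064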
1 change: 1 addition & 0 deletions src/NiLang.jl
@@ -11,6 +11,7 @@ const Fixed43 = Q20f43
include("utils.jl")
include("vars.jl")
include("instructs.jl")
include("ulog.jl")
include("stack.jl")
include("complex.jl")
include("autobcast.jl")
2 changes: 2 additions & 0 deletions src/autodiff/autodiff.jl
@@ -6,6 +6,7 @@ using MatchCore, TupleTools

import ..NiLang: ROT, IROT, SWAP,
chfield, value, NoGrad, loaddata, INC, DEC
using NiLangCore: default_constructor

export GVar, grad, Loss, NoGrad, @nograd

@@ -14,6 +15,7 @@ include("gradfunc.jl")
include("checks.jl")

include("instructs.jl")
include("ulog.jl")
include("jacobian.jl")
include("hessian_backback.jl")
include("complex.jl")
5 changes: 5 additions & 0 deletions src/autodiff/instructs.jl
@@ -318,3 +318,8 @@ end
out!.x -= mf.f(x .|> value; kwargs...)
x.g += (@skip! out!.g) * primitive_grad(mf.f, x.x; kwargs...)
end

@i function :(-=)(convert)(out!::GVar{Tx, Tg}, y::GVar) where {Tx, Tg}
out!.x -= convert(y.x)
y.g += convert(out!.g)
end
29 changes: 29 additions & 0 deletions src/autodiff/ulog.jl
@@ -0,0 +1,29 @@
@i function (:-=)(gaussian_log)(y!::GVar{T}, x::GVar{T}) where T
y!.x -= gaussian_log(x.x)
@routine @invcheckoff begin
exp_x ← zero(x)
jac ← zero(x)
exp_x += exp(-x)
exp_x += 1
jac += 1/exp_x
end
x.g += y!.g * jac
~@routine
end

@i function (:-=)(gaussian_nlog)(y!::GVar{T}, x::GVar{T}) where T
y!.x -= gaussian_nlog(x.x)
@routine @invcheckoff begin
exp_x ← zero(x)
jac ← zero(x)
exp_x += exp(-x)
exp_x -= 1
jac -= 1/exp_x
end
x.g += y!.g * jac
~@routine
end

@i function :(-=)(convert)(out!::GVar{Tx, Tg}, y::ULogarithmic) where {Tx, Tg}
out! -= exp(y.log)
end
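
For reference, the `jac` assembled in the `gaussian_log` adjoint above is the analytic derivative d/dx log1p(exp(x)) = 1/(1 + exp(-x)), and likewise for `gaussian_nlog`. A quick sanity check with ForwardDiff (added as a test dependency in this PR); the snippet is illustrative and not part of the commit:

using ForwardDiff

glog(x)  = log1p(exp(x))    # same formula as gaussian_log
nglog(x) = log1p(-exp(x))   # same formula as gaussian_nlog (valid for x < 0)

ForwardDiff.derivative(glog, 0.3)   ≈ 1 / (1 + exp(-0.3))   # true
ForwardDiff.derivative(nglog, -0.3) ≈ -1 / (exp(0.3) - 1)   # true, matches jac = -1/(exp(-x) - 1)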
28 changes: 28 additions & 0 deletions src/autodiff/vars.jl
@@ -145,3 +145,31 @@ end
function loaddata(::Type{AGT}, x::AT) where {T, GT, AT<:AbstractArray{T}, AGT<:AbstractArray{GVar{T,T}}}
map(x->GVar(x, zero(x)), x)
end

#=
Base.convert(::Type{GVar{Tx, Tg}}, x::GVar) where {Tx, Tg} = GVar(convert(Tx, x.x), convert(Tg, x.g))
function Base.convert(::Type{GVar{Tx, Tg}}, x::ULogarithmic{<:GVar}) where {Tx, Tg}
expx = exp(x.log.x)
@show expx, Tx, Tg, x.log.g/expx
@show GVar(convert(Tx, expx), convert(Tg, x.log.g/expx))
GVar(convert(Tx, expx), convert(Tg, x.log.g/expx))
end
=#

# ULogarithmic
_content(x::ULogarithmic) = x.log
for T in [:ULogarithmic]
@eval NiLang.AD.GVar(x::$T) = default_constructor($T, GVar(_content(x), zero(_content(x))))
@eval (_::Type{Inv{$T}})(x::$T) = _content(x)
#@eval NiLang.AD.grad(x::$T{<:GVar}) = default_constructor($T, grad(_content(x)))
@eval (_::Type{Inv{GVar}})(x::$T{<:GVar}) = default_constructor($T, (~GVar)(_content(x)))

@eval Base.one(x::$T{GVar{T,GT}}) where {T, GT} = one($T{GVar{T,GT}})
@eval Base.one(::Type{$T{GVar{T,GT}}}) where {T,GT} = default_constructor($T, GVar(zero(T), zero(GT)))
@eval Base.zero(x::$T{GVar{T,GT}}) where {T,GT} = zero($T{GVar{T,GT}})
@eval Base.zero(::Type{$T{GVar{T,T}}}) where T = default_constructor($T, GVar(zero(T), zero(T)))
end

function NiLang.loaddata(::Type{Array{<:ULogarithmic{GVar{T,T}}}}, data::Array{<:ULogarithmic{T}}) where {T}
GVar.(data)
end
12 changes: 11 additions & 1 deletion src/instructs.jl
@@ -120,9 +120,19 @@ function (f::MinusEq{typeof(/)})(out!::T, x::Integer, y::Integer) where T<:Fixed
end

for F in [:exp, :log, :sin, :cos]
@eval Base.$F(x::Fixed43) = Fixed43($F(Float64(Fixed43(x))))
@eval Base.$F(x::Fixed43) = Fixed43($F(Float64(x)))
@eval (f::PlusEq{typeof($F)})(out!::Fixed43, x::Real) = out! + Fixed43($F(x)), x
@eval (f::MinusEq{typeof($F)})(out!::Fixed43, x::Real) = out! - Fixed43($F(x)), x
end

Base.:^(x::Integer, y::Fixed43) = Fixed43(x^(Float64(y)))
Base.:^(x::Fixed43, y::Fixed43) = Fixed43(x^(Float64(y)))
Base.:^(x::T, y::Fixed43) where T<:AbstractFloat = x^(T(y))

function (::PlusEq{typeof(convert)})(out!::T, y) where T<:Real
out! + convert(T, y), y
end

function (::MinusEq{typeof(convert)})(out!::T, y) where T<:Real
out! - convert(T, y), y
end
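
The two functor methods above are what `out! += convert(y)` and `out! -= convert(y)` lower to, and they undo each other exactly. An illustrative round trip, assuming `Fixed43` and the `PlusEq`/`MinusEq` functors are in scope as in this repository's tests (not part of the commit):

using NiLang

out, y = PlusEq(convert)(Fixed43(0.0), 0.25)   # out == Fixed43(0.25)
out, y = MinusEq(convert)(out, y)              # undoes the update: out == Fixed43(0.0)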
4 changes: 2 additions & 2 deletions src/stdlib/statistics.jl
@@ -13,9 +13,9 @@ get the `mean` and `sum` of `x`.
end

"""
var_and_mean_sq(var!, varsum!, mean!, sqv)
i_var_mean_sum(var!, varsum!, mean!, sum!, sqv)
The `variance`, `variance * (n-1)`, `mean` and `sum` of `sqv`, where `n` is the size of `sqv`.
Compute the variance, the accumulated variance, mean and sum.
"""
@i function i_var_mean_sum(var!, varsum!, mean!, sum!, v::AbstractVector{T}) where T
i_mean_sum(mean!, sum!, v)
76 changes: 76 additions & 0 deletions src/ulog.jl
@@ -0,0 +1,76 @@
using LogarithmicNumbers
export gaussian_log, gaussian_nlog
export ULogarithmic

function NiLangCore.default_constructor(ln::Type{<:ULogarithmic}, x)
exp(ULogarithmic, x)
end

@i @inline function (:*=(identity))(x::T, y::T) where T<:ULogarithmic
x.log += y.log
end

for (OP1, OP2, OP3) in [(:*, :+, :(+=)), (:/, :-, :(-=))]
@eval @i @inline function (:*=($OP1))(out!::T, x::T, y::T) where T<:ULogarithmic
out!.log += $OP2(x.log, y.log)
end

@eval @i @inline function (:*=($OP1))(out!::T, x::Real, y::Real) where T<:ULogarithmic
out!.log += log(x)
$(Expr(OP3, :(out!.log), :(log(y))))
end

@eval @i @inline function (:*=($OP1))(out!::T, x::T, y::Real) where T<:ULogarithmic
out!.log += x.log
$(Expr(OP3, :(out!.log), :(log(y))))
end

@eval @i @inline function (:*=($OP1))(out!::T, x::Real, y::T) where T<:ULogarithmic
out!.log += log(x)
$(Expr(OP3, :(out!.log), :(y.log)))
end
end

gaussian_log(x) = log1p(exp(x))
gaussian_nlog(x) = log1p(-exp(x))

@i function (:*=)(+)(out!::ULogarithmic{T}, x::ULogarithmic{T}, y::ULogarithmic{T}) where {T}
@invcheckoff if (x.log == y.log, ~)
out!.log += x.log
out!.log += log(2)
elseif (x.log ≥ y.log, ~)
out!.log += x.log
y.log -= x.log
out!.log += gaussian_log(y.log)
y.log += x.log
else
out!.log += y.log
x.log -= y.log
out!.log += gaussian_log(x.log)
x.log += y.log
end
end

@i function (:*=)(-)(out!::ULogarithmic{T}, x::ULogarithmic{T}, y::ULogarithmic{T}) where {T}
@safe @assert x.log ≥ y.log
@invcheckoff if (!iszero(x), ~)
out!.log += x.log
y.log -= x.log
out!.log += gaussian_nlog(y.log)
y.log += x.log
end
end

@i function :(*=)(convert)(out!::ULogarithmic{T}, y::ULogarithmic) where T
out!.log += convert((@skip! T), y.log)
end

@i function :(*=)(convert)(out!::ULogarithmic{T}, y::T) where T<:Real
out!.log += log(y)
end

Base.convert(::Type{T}, x::ULogarithmic{T}) where {T<:Fixed} = exp(x.log)

function NiLangCore.deanc(x::T, v::T) where T<:ULogarithmic
x === v || NiLangCore.deanc(x.log, v.log)
end
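
The `*=(+)` and `*=(-)` bodies above apply the log-sum-exp identity with the larger operand factored out, which is where `gaussian_log` and `gaussian_nlog` come in. A plain-Julia check of the identity (illustrative, not part of the commit):

a, b = log(3.0), log(2.0)                  # stored logs of the two operands, with a ≥ b

log(3.0 + 2.0) ≈ a + log1p(exp(b - a))     # true: addition, cf. gaussian_log
log(3.0 - 2.0) ≈ a + log1p(-exp(b - a))    # true: subtraction, cf. gaussian_nlog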
1 change: 1 addition & 0 deletions test/autodiff/autodiff.jl
@@ -4,6 +4,7 @@ include("vars.jl")
include("gradfunc.jl")

include("instructs.jl")
include("ulog.jl")
include("complex.jl")
include("manual.jl")
include("jacobian.jl")
1 change: 1 addition & 0 deletions test/autodiff/instructs.jl
@@ -19,6 +19,7 @@ using Test
@test check_grad(opm(sin), (1.0, 2.0); verbose=true, iloss=1)
@test check_grad(opm(tanh), (1.0, 2.0); verbose=true, iloss=1)
@test check_grad(opm(cos), (1.0, 2.0); verbose=true, iloss=1)
@test check_grad(opm(convert), (Fixed43(0.5), 2.0); verbose=true, iloss=1)
@test check_grad(opm(/), (1.0, 2.0, 2.0); verbose=true, iloss=1)
@test_broken check_grad(opm(÷), (1.0, 2.0, 2.0); verbose=true, iloss=1)
end