Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Code Formatting -- SciMLStyle #31

Merged
merged 6 commits into from
May 25, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .JuliaFormatter.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
style = "sciml"
whitespace_in_kwargs = false
43 changes: 43 additions & 0 deletions .github/workflows/FormatCheck.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
name: format-check

on:
push:
branches:
- 'main'
- 'release-'
tags: '*'
pull_request:

jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
julia-version: [1.7]
julia-arch: [x86]
os: [ubuntu-latest]
steps:
- uses: julia-actions/setup-julia@latest
with:
version: ${{ matrix.julia-version }}

- uses: actions/checkout@v1
- name: Install JuliaFormatter and format
# This will use the latest version by default but you can set the version like so:
#
# julia -e 'using Pkg; Pkg.add(PackageSpec(name="JuliaFormatter", version="0.13.0"))'
run: |
julia -e 'using Pkg; Pkg.add(PackageSpec(name="JuliaFormatter"))'
julia -e 'using JuliaFormatter; format(".", verbose=true)'
- name: Format check
run: |
julia -e '
out = Cmd(`git diff --name-only`) |> read |> String
if out == ""
exit(0)
else
@error "Some files have not been formatted !!!"
write(stdout, out)
exit(1)
end'

29 changes: 29 additions & 0 deletions .github/workflows/FormatPR.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
name: format-pr
on:
schedule:
- cron: '0 0 * * *'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install JuliaFormatter and format
run: |
julia -e 'using Pkg; Pkg.add(PackageSpec(name="JuliaFormatter"))'
julia -e 'using JuliaFormatter; format(".")'
# https://github.com/marketplace/actions/create-pull-request
# https://github.com/peter-evans/create-pull-request#reference-example
- name: Create Pull Request
id: cpr
uses: peter-evans/create-pull-request@v3
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Format .jl files
title: 'Automatic JuliaFormatter.jl run'
branch: auto-juliaformatter-pr
delete-branch: true
labels: formatting, automated pr, no changelog
- name: Check outputs
run: |
echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}"
echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
84 changes: 47 additions & 37 deletions docs/make.jl
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@ if haskey(ENV, "GITHUB_ACTIONS")
end

deployconfig = Documenter.auto_detect_deploy_system()
Documenter.post_status(deployconfig; type="pending", repo="github.com/avik-pal/Lux.jl.git")
Documenter.post_status(deployconfig; type="pending",
repo="github.com/avik-pal/Lux.jl.git")

# Tutorials
get_example_path(p) = joinpath(@__DIR__, "..", "examples", p)
Expand All @@ -27,49 +28,58 @@ ADVANCED_TUTORIALS = []
ADVANCED_TUTORIAL_NAMES = []
MAPPING = Dict("beginner" => [], "intermediate" => [], "advanced" => [])

for (d, names, paths) in
(("beginner", BEGINNER_TUTORIAL_NAMES, BEGINNER_TUTORIALS), ("intermediate", INTERMEDIATE_TUTORIAL_NAMES, INTERMEDIATE_TUTORIALS), ("advanced", ADVANCED_TUTORIAL_NAMES, ADVANCED_TUTORIALS))
for (d, names, paths) in (("beginner", BEGINNER_TUTORIAL_NAMES, BEGINNER_TUTORIALS),
("intermediate", INTERMEDIATE_TUTORIAL_NAMES,
INTERMEDIATE_TUTORIALS),
("advanced", ADVANCED_TUTORIAL_NAMES, ADVANCED_TUTORIALS))
for (n, p) in zip(names, paths)
Literate.markdown(get_example_path(p), joinpath(OUTPUT, d, dirname(p)); documenter=true)
push!(MAPPING[d], n => joinpath("examples/generated", d, dirname(p), splitext(basename(p))[1] * ".md"))
Literate.markdown(get_example_path(p), joinpath(OUTPUT, d, dirname(p));
documenter=true)
push!(MAPPING[d],
n => joinpath("examples/generated", d, dirname(p),
splitext(basename(p))[1] * ".md"))
end
end

display(MAPPING)

makedocs(;
sitename="Lux",
authors="Avik Pal et al.",
clean=true,
doctest=false,
modules=[Lux],
format=Documenter.HTML(;
prettyurls=get(ENV, "CI", nothing) == "true",
assets=["assets/custom.css"],
analytics = "G-Q8GYTEVTZ2"
),
pages=[
"Lux: Explicitly Parameterized Neural Networks" => "index.md",
"Introduction" => ["All about Lux" => "introduction/overview.md", "Ecosystem" => "introduction/ecosystem.md"],
"Examples" => [
"Beginner" => MAPPING["beginner"],
"Intermediate" => MAPPING["intermediate"],
"Advanced" => MAPPING["advanced"],
"Additional Examples" => "examples.md",
],
"API" => [
"Layers" => "api/layers.md",
"Functional" => "api/functional.md",
"Core" => "api/core.md",
"Utilities" => "api/utilities.md",
],
"Design Docs" => [
"Documentation" => "design/documentation.md",
"Recurrent Neural Networks" => "design/recurrent.md",
]
],
)
sitename="Lux",
authors="Avik Pal et al.",
clean=true,
doctest=false,
modules=[Lux],
format=Documenter.HTML(;
prettyurls=get(ENV, "CI", nothing) == "true",
assets=["assets/custom.css"]
# analytics = "G-Q8GYTEVTZ2"
),
pages=[
"Lux: Explicitly Parameterized Neural Networks" => "index.md",
"Introduction" => [
"All about Lux" => "introduction/overview.md",
"Ecosystem" => "introduction/ecosystem.md",
],
"Examples" => [
"Beginner" => MAPPING["beginner"],
"Intermediate" => MAPPING["intermediate"],
"Advanced" => MAPPING["advanced"],
"Additional Examples" => "examples.md",
],
"API" => [
"Layers" => "api/layers.md",
"Functional" => "api/functional.md",
"Core" => "api/core.md",
"Utilities" => "api/utilities.md",
],
"Design Docs" => [
"Documentation" => "design/documentation.md",
"Recurrent Neural Networks" => "design/recurrent.md",
"Add new functionality to Lux" => "design/core.md",
],
])

deploydocs(; repo="github.com/avik-pal/Lux.jl.git", push_preview=true, devbranch="main")
deploydocs(; repo="github.com/avik-pal/Lux.jl.git", push_preview=true,
devbranch="main")

Pkg.activate(@__DIR__)
19 changes: 19 additions & 0 deletions docs/src/design/core.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Adding New Functionality/Layers

For style we try to follow [SciMLStyle](https://github.com/SciML/SciMLStyle). The only reason we don't have a badge yet is that we haven't updated the package to follow all the guidelines. Here, I am documenting some additional guidelines we enforce:

## Mutability

See https://github.com/SciML/SciMLStyle#out-of-place-and-immutability-is-preferred-when-sufficient-performant for reference. This is strictly enforced, i.e. all layers/functions provided as part of the external API must be pure functions, even if they come with a performance penalty.

## Branching -- Generated Functions

Zygote doesn't like branches in code. Like it or not, we are stuck with it for the near future. Even if Julia is able to optimize branches away, Zygote will most certainly throw away those optimizations (these can be tested via `Zygote.@code_ir`).

### Writing efficient non-branching code to make Zygote happy

* Rely on `@generated` functions to remove **most** runtime branching. Certain examples:
* Layers behaving differently during training and inference -- we know at compile-time whether a layer is being run in training/inference mode via `istraining(st)`.
* Composite Layers relying on a variable number of internal layers -- Again we know the length of the number of internal layers at compile time. Hence we can manually unroll the loops. See [`Parallel`](@ref), [`Chain`](@ref), etc.
* Pass around `Val` in state. `Flux.jl` sets `training` to be `(:auto, true, false)`. Hence, which branch will be evaluated has to be determined at runtime (*bad*). Instead, if we pass `Val(true)`, we will be able to specialize functions directly based on `true`, `false`, etc., ensuring there is no runtime cost for these operations. See [`BatchNorm`](@ref), [`Dropout`](@ref), etc.

17 changes: 10 additions & 7 deletions examples/Basics/main.jl
Original file line number Diff line number Diff line change
Expand Up @@ -68,11 +68,11 @@ x .+ 1

# We can see Julia tile the column vector `1:5` across all rows of the larger array.

zeros(5,5) .+ (1:5)
zeros(5, 5) .+ (1:5)

# The x' syntax is used to transpose a column `1:5` into an equivalent row, and Julia will tile that across columns.

zeros(5,5) .+ (1:5)'
zeros(5, 5) .+ (1:5)'

# We can use this to make a times table.

Expand Down Expand Up @@ -121,13 +121,13 @@ Random.seed!(rng, 0)

# First, let us run a random number generator 3 times with the `replicate`d rng

for i = 1:3
for i in 1:3
println("Iteration $i ", rand(Lux.replicate(rng), 10))
end

# As expected we get the same output. We can remove the `replicate` call and we will get different outputs

for i = 1:3
for i in 1:3
println("Iteration $i ", rand(rng, 10))
end

Expand Down Expand Up @@ -155,8 +155,10 @@ v = randn(rng, Float32, 4)
# Let's use AbstractDifferentiation and Zygote to compute the gradients

println("Actual Gradient: ", ∇f(v))
println("Computed Gradient via Reverse Mode AD (Zygote): ", AD.gradient(AD.ZygoteBackend(), f, v)[1])
println("Computed Gradient via Forward Mode AD (ForwardDiff): ", AD.gradient(AD.ForwardDiffBackend(), f, v)[1])
println("Computed Gradient via Reverse Mode AD (Zygote): ",
AD.gradient(AD.ZygoteBackend(), f, v)[1])
println("Computed Gradient via Forward Mode AD (ForwardDiff): ",
AD.gradient(AD.ForwardDiffBackend(), f, v)[1])

# Note that `AD.gradient` will only work for scalar valued outputs

Expand Down Expand Up @@ -248,6 +250,7 @@ for i in 1:100
## Perform parameter update
opt_state, ps = Optimisers.update(opt_state, ps, gs)
if i % 10 == 1 || i == 100
println("Loss Value after $i iterations: ", mse(model, ps, st, x_samples, y_samples))
println("Loss Value after $i iterations: ",
mse(model, ps, st, x_samples, y_samples))
end
end
7 changes: 3 additions & 4 deletions examples/BayesianNN/main.jl
Original file line number Diff line number Diff line change
Expand Up @@ -32,14 +32,14 @@ x2s = rand(rng, Float32, M) * 4.5f0;
xt1s = Array([[x1s[i] + 0.5f0; x2s[i] + 0.5f0] for i in 1:M])
x1s = rand(rng, Float32, M) * 4.5f0;
x2s = rand(rng, Float32, M) * 4.5f0;
append!(xt1s, Array([[x1s[i] - 5f0; x2s[i] - 5f0] for i in 1:M]))
append!(xt1s, Array([[x1s[i] - 5.0f0; x2s[i] - 5.0f0] for i in 1:M]))

x1s = rand(rng, Float32, M) * 4.5f0;
x2s = rand(rng, Float32, M) * 4.5f0;
xt0s = Array([[x1s[i] + 0.5f0; x2s[i] - 5f0] for i in 1:M])
xt0s = Array([[x1s[i] + 0.5f0; x2s[i] - 5.0f0] for i in 1:M])
x1s = rand(rng, Float32, M) * 4.5f0;
x2s = rand(rng, Float32, M) * 4.5f0;
append!(xt0s, Array([[x1s[i] - 5f0; x2s[i] + 0.5f0] for i in 1:M]))
append!(xt0s, Array([[x1s[i] - 5.0f0; x2s[i] + 0.5f0] for i in 1:M]))

## Store all the data for later.
xs = [xt1s; xt0s]
Expand Down Expand Up @@ -140,7 +140,6 @@ x2_range = collect(range(-6; stop=6, length=25))
Z = [nn_forward([x1, x2], theta[i, :])[1] for x1 in x1_range, x2 in x2_range]
contour!(x1_range, x2_range, Z)


# The contour plot above shows that the MAP method is not too bad at classifying our data. Now we can visualize our predictions.

# $p(\tilde{x} | X, \alpha) = \int_{\theta} p(\tilde{x} | \theta) p(\theta | X, \alpha) \approx \sum_{\theta \sim p(\theta | X, \alpha)}f_{\theta}(\tilde{x})$
Expand Down
Loading