Skip to content

Commit

Permalink
Get ready for release of 0.1.0 (#145)
Browse files Browse the repository at this point in the history
* Correct `encode` method for `NamedTuple`

* Implement `blocklossfn(block)` fallback method

* minor cleanup of plot code

* implement `Block` interface for `WrapperBlock`

* remove `collectobs` stub

* make `ProjectiveTransforms` work with `WrapperBlock`s

* fix `wd` option in `fitonecycle!`

* remove outdated docstring

* update doc serve script

* fix `plotprediction!` for segmentation

* amend

* add Colab instructions to "Setup" page

* Update `fitonecycle` notebook
  • Loading branch information
lorenzoh committed Jul 27, 2021
1 parent 186c3dd commit 485ef9f
Show file tree
Hide file tree
Showing 12 changed files with 259 additions and 66 deletions.
4 changes: 2 additions & 2 deletions docs/serve.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@ import DataAugmentation
using FilePathsBase
using Colors

function serve(lazy=true)
function serve(lazy=true; kwargs...)
refmodules = [FastAI, FluxTraining, DLPipelines, DataAugmentation, DataLoaders, FastAI.Datasets]
project = Pollen.documentationproject(FastAI; refmodules, watchpackage=true)
project = Pollen.documentationproject(FastAI; refmodules, watchpackage=true, kwargs...)
Pollen.serve(project, lazy=lazy)
end
serve()
Expand Down
6 changes: 3 additions & 3 deletions docs/setup.md
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
# Setup

FastAI.jl is a **Julia** package. You can download Julia from the [official website](https://julialang.org/downloads/).

**FastAI.jl** is not registered yet, but you can try it out by installing it manually. You should be able to install FastAI.jl using the REPL as follows (The package mode in the REPL can be entered by typing `]`).
FastAI.jl is a **Julia** package. You can download Julia from the [official website](https://julialang.org/downloads/). You can install FastAI.jl like any other Julia package using the REPL as follows (The package mode in the REPL can be entered by typing `]`).

```julia
pkg> add https://github.com/FluxML/FastAI.jl
Expand All @@ -14,6 +12,8 @@ pkg> add https://github.com/FluxML/FastAI.jl
pkg> add CairoMakie
```

**Colab** If you don't have access to a GPU or want to try out FastAI.jl without installing Julia, try out [this FastAI.jl Colab notebook](https://colab.research.google.com/gist/lorenzoh/2fdc91f9e42a15e633861c640c68e5e8). We're working on adding a "Launch Colab" button to every documentation page based off a notebook file, but for now you can copy the code over manually.

**Pretrained models** To use pretrained vision models, you currently have to install a WIP branch of Metalhead.jl:

```julia
Expand Down
1 change: 1 addition & 0 deletions notebooks/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ FastAI = "5d0beca9-ade8-49ae-ad0b-a3cf890e669f"
FilePathsBase = "48062228-2e41-5def-b9a4-89aafe57970f"
FluxTraining = "7bf95e4d-ca32-48da-9824-f0dc5310474f"
Images = "916415d5-f1e6-5110-898d-aaa5f9f070e0"
Makie = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a"
Metalhead = "dbeba491-748d-5e0e-a39e-b530a07fa0cc"
StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
252 changes: 224 additions & 28 deletions notebooks/fitonecycle.ipynb

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions src/datablock/encoding.jl
Original file line number Diff line number Diff line change
Expand Up @@ -81,11 +81,11 @@ function encode(encoding::Encoding, context, blocks::Tuple, datas::Tuple)
end

# Named tuples of data are handled like tuples, but the keys are preserved
function encode(encoding::Encoding, context, blocks::Union{Tuple, NamedTuple}, datas::NamedTuple)
function encode(encoding::Encoding, context, blocks::NamedTuple, datas::NamedTuple)
@assert length(blocks) == length(datas)
return NamedTuple(zip(
keys(datas),
encode(encoding, context, blocks, values(datas))
encode(encoding, context, values(blocks), values(datas))
))
end

Expand Down Expand Up @@ -114,11 +114,11 @@ function decode(encoding::Encoding, context, blocks::Tuple, datas::Tuple)
end

# Named tuples of data are handled like tuples, but the keys are preserved
function decode(encoding::Encoding, context, blocks::Union{Tuple, NamedTuple}, datas::NamedTuple)
function decode(encoding::Encoding, context, blocks::NamedTuple, datas::NamedTuple)
@assert length(blocks) == length(datas)
return NamedTuple(zip(
keys(datas),
decode(encoding, context, blocks, values(datas))
decode(encoding, context, values(blocks), values(datas))
))
end

Expand Down
2 changes: 1 addition & 1 deletion src/datablock/loss.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ For example for `block = OneHotTensor{1}(classes)` (i.e. an encoded
function blocklossfn end


blocklossfn(predblock) = blocklossfn(predblock, targetblock)
blocklossfn(predblock) = blocklossfn(predblock, predblock)

function blocklossfn(outblock::OneHotTensor{0}, yblock::OneHotTensor{0})
outblock.classes == yblock.classes || error("Classes of $outblock and $yblock differ!")
Expand Down
19 changes: 12 additions & 7 deletions src/datablock/plot.jl
Original file line number Diff line number Diff line change
Expand Up @@ -68,11 +68,15 @@ function plotsample!(f, ctxs::Tuple{NDContext{2}, NDOverlayContext{2}}, blocks,
plotblock!(ax, blocks[1], datas[1], alpha = 0.6) # plots an N-D image
end

function plotsample!(f, ctxs::Tuple{NDContext{2}, NDContext{2}}, blocks, datas)
f[1, 1] = ax1 = imageaxis(f)
plotblock!(ax1, blocks[1], datas[1]) # plots an image
f[1, 2] = ax2 = imageaxis(f)
plotblock!(ax, blocks[2], datas[2]) # plots an image
function plotsample!(
f,
ctxs::NTuple{N, NDContext{2}},
blocks::NTuple{N},
datas::NTuple{N}) where N
for i in 1:N
f[1, i] = ax = imageaxis(f)
plotblock!(ax, blocks[i], datas[i]) # plots an image
end
end

function plotsample!(
Expand Down Expand Up @@ -102,13 +106,14 @@ function plotprediction!(f, method::BlockMethod, x, ŷ, y)
ŷblock = method.outputblock
blocks = (xblock, ŷblock, yblock)
input, target_pred, target = decode(method.encodings, Validation(), blocks, (x, ŷ, y))
inblocks = decodedblock(method.encodings, blocks)
inblocks = decodedblock(method.encodings, blocks, true)
contexts = plotcontext(inblocks)
plotprediction!(f, contexts, inblocks, (input, target_pred, target))
end

function plotprediction!(
f, ::Tuple{NDContext{2}, TextContext, TextContext},
f,
::Tuple{NDContext{2}, TextContext, TextContext},
blocks,
datas)

Expand Down
9 changes: 5 additions & 4 deletions src/datablock/wrappers.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
abstract type WrapperBlock <: Block end

wrapped(w::WrapperBlock) = w.block
mockblock(w::WrapperBlock) = mockblock(wrapped(w))
checkblock(w::WrapperBlock, data) = checkblock(wrapped(w), data)

# If not overwritten, encodings are applied to the wrapped block
function encode(enc::Encoding, ctx, wrapper::WrapperBlock, data; kwargs...)
Expand All @@ -21,17 +24,15 @@ struct Named{Name, B<:Block} <: WrapperBlock
end
Named(name::Symbol, block::B) where {B<:Block} = Named{name, B}(block)

mockblock(named::Named) = mockblock(named.block)
checkblock(named::Named, data) = checkblock(named.block, data)

# the name is preserved through encodings and decodings
function encodedblock(enc::Encoding, named::Named{Name}) where Name
outblock = encodedblock(enc, named.block)
outblock = encodedblock(enc, wrapped(named))
return isnothing(outblock) ? nothing : Named(Name, outblock)
end

function decodedblock(enc::Encoding, named::Named{Name}) where Name
outblock = decodedblock(enc, named.block)
outblock = decodedblock(enc, wrapped(named))
return isnothing(outblock) ? nothing : Named(Name, outblock)
end

Expand Down
5 changes: 0 additions & 5 deletions src/datasets/transformations.jl
Original file line number Diff line number Diff line change
Expand Up @@ -157,8 +157,3 @@ getobs(jdata, 15) == 15
```
"""
joinobs(datas...) = JoinedData(datas)


function collectobs(data, parallel=false)
collect()
end
2 changes: 2 additions & 0 deletions src/encodings/projective.jl
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,7 @@ function blockitemtype(block::Mask{N}, n::Int) where N
end
end
blockitemtype(block::Keypoints{N}, n::Int) where N = N == n ? DataAugmentation.Keypoints : nothing
blockitemtype(block::WrapperBlock, n::Int) = blockitemtype(wrapped(block), n)


"""
Expand All @@ -152,6 +153,7 @@ end

grabbounds(block::Image{N}, a, n) where N = N == n ? DataAugmentation.Bounds(size(a)) : nothing
grabbounds(block::Mask{N}, a, n) where N = N == n ? DataAugmentation.Bounds(size(a)) : nothing
grabbounds(block::WrapperBlock, a, n) = grabbounds(wrapped(block), a, n)


function getsamplebounds(blocks, datas, N)
Expand Down
15 changes: 4 additions & 11 deletions src/plotting.jl
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,6 @@ defaultfigure(;kwargs...) = Figure(
Plot a `sample` of a `method`.
Learning methods should implement [`plotsample!`](#) to make this work.
## Examples
```julia
using FastAI, Colors
sample = (rand(Gray, 28, 28), 1)
method = ImageClassification(1:10, (16, 16))
plotsample(method, sample)
```
"""
function plotsample(method, sample; figkwargs...)
f = defaultfigure(; figkwargs...)
Expand All @@ -40,7 +30,10 @@ Part of the plotting interface for learning methods.
function plotsample! end

"""
plotxy(method, (x, y))
plotxy(method, x, y)
Plot a single pair of encoded input `x` and encoded target `y`.
Requires [`plotxy!`](#) to be implemented.
"""
function plotxy(method, x, y; figkwargs...)
f = defaultfigure(; figkwargs...)
Expand Down
2 changes: 1 addition & 1 deletion src/training/onecycle.jl
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ function fitonecycle!(
maxlr;
kwargs...))

wdoptim = wd > 0 ? decay_optim(learner.optimizer) : learner.optimizer
wdoptim = wd > 0 ? decay_optim(learner.optimizer, wd) : learner.optimizer
withfields(learner, optimizer=wdoptim) do
withcallbacks(learner, scheduler) do
fit!(learner, nepochs, dataiters)
Expand Down

0 comments on commit 485ef9f

Please sign in to comment.