From 462c69fcfbc0260081404a625dbbbb93ec3e1249 Mon Sep 17 00:00:00 2001
From: Ricardo Rosa
Date: Mon, 1 May 2023 09:01:29 -0300
Subject: [PATCH] Use 4 backticks to avoid an apparent VS Code parsing error

---
 README.md | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/README.md b/README.md
index 543f5ed..cf51cc4 100644
--- a/README.md
+++ b/README.md
@@ -45,12 +45,12 @@ In all the examples below, one needs `Flux`, `ChainPlots` and `Plots`, while for

### Dense and Recurrent layers

-```julia
+````julia
julia> nnr = Chain(Dense(2,5,σ),RNN(5,4,relu), LSTM(4,4), GRU(4,4), Dense(4,3))
Chain(Dense(2, 5, σ), Recur(RNNCell(5, 4, relu)), Recur(LSTMCell(4, 4)), Recur(GRUCell(4, 4)), Dense(4, 3))

julia> plot(nnr, title="With theme default", titlefontsize=10)
-```
+````

![nnr_default plot](examples/img/nnr_default.png)

@@ -58,7 +58,7 @@ julia> plot(nnr, title="With theme default", titlefontsize=10)

Variable-input functional layers are also accepted. If given as the first layer, an initial input must be provided; otherwise, the input data is not needed. Here are two examples, illustrating each case.

-```julia
+````julia
julia> dx(x) = x[2:end]-x[1:end-1]
dx (generic function with 1 method)

@@ -69,18 +69,18 @@ julia> nna = Chain(Dense(2,5,σ), dx, RNN(4,6,relu), x³, LSTM(6,4), GRU(4,4), D
Chain(Dense(2, 5, σ), dx, Recur(RNNCell(4, 6, relu)), x³, Recur(LSTMCell(6, 4)), Recur(GRUCell(4, 4)), Dense(4, 3))

julia> plot(nna, title="$nna", titlefontsize=7)
-```
+````

![nna plot](examples/img/nna.png)

-```julia
+````julia
julia> nnx = Chain(x³, dx, LSTM(5,10), Dense(10,5))
Chain(x³, dx, Recur(LSTMCell(5, 10)), Dense(10, 5))

julia> input_data = rand(6);

julia> plot(nnx, input_data, title="$nnx", titlefontsize=9)
-```
+````

![nnx plot](examples/img/nnx.png)

@@ -88,7 +88,7 @@ julia> plot(nnx, input_data, title="$nnx", titlefontsize=9)

A neural network with a one-dimensional convolutional layer:

-```julia
+````julia
julia> reshape6x1x1(a) = reshape(a, 6, 1, 1)
reshape6x1x1 (generic function with 1 method)

@@ -99,13 +99,13 @@ julia> nnrs = Chain(x³, Dense(3,6), reshape6x1x1, Conv((2,), 1=>1), slice, Dens
Chain(x³, Dense(3, 6), reshape6x1x1, Conv((2,), 1=>1), slice, Dense(5, 4))

julia> plot(nnrs, Float32.(rand(3)), title="$nnrs", titlefontsize=9)
-```
+````

![nnrs plot](examples/img/nnrs.png)

Now with a two-dimensional convolution:

-```julia
+````julia
julia> reshape4x4x1x1(a) = reshape(a, 4, 4, 1, 1)
reshape4x4x1x1 (generic function with 1 method)

@@ -113,13 +113,13 @@ julia> nnrs2d = Chain(x³, Dense(4,16), reshape4x4x1x1, Conv((2,2), 1=>1), slice
Chain(x³, Dense(4, 16), reshape4x4x1x1, Conv((2, 2), 1=>1), slice)

julia> plot(nnrs2d, Float32.(rand(4)), title="$nnrs2d", titlefontsize=9)
-```
+````

![nnrs2d plot](examples/img/nnrs2d.png)

With convolutional and pooling layers:

-```julia
+````julia
julia> nncp = Chain(
           Conv((3, 3), 1=>2, pad=(1,1), bias=false),
           MaxPool((2,2)),
@@ -138,7 +138,7 @@ Chain(
) # Total: 5 arrays, 242 parameters, 2.047 KiB.

julia> plot(nncp, (16, 16, 1, 1), title="Chain with convolutional and pooling layers", titlefontsize=10)
-```
+````

![nncp plot](examples/img/nncp.png)

@@ -146,7 +146,7 @@ julia> plot(nncp, (16, 16, 1, 1), title="Chain with convolutional and pooling la

With `ChainPlots.chaingraph()` we can convert a `Flux.Chain` to a `MetaGraph`.

-```julia +````julia julia> nnr = Chain(Dense(2,5,σ),RNN(5,4,relu), LSTM(4,4), GRU(4,4), Dense(4,3)) Chain(Dense(2, 5, σ), Recur(RNNCell(5, 4, relu)), Recur(LSTMCell(4, 4)), Recur(GRUCell(4, 4)), Dense(4, 3)) @@ -175,13 +175,13 @@ julia> get_prop.(Ref(mg_nnr), 15, [:loc_x, :loc_y]) 2-element Vector{Real}: 3.0 0.75 -``` +```` ### Visualizing the MetaGraph We may visualize the generated MetaGraph with [JuliaGraphs/GraphPlot.jl](https://github.com/JuliaGraphs/GraphPlot.jl). We use the attributes `:loc_x`, `:loc_y`, and `:neuron_color` to properly position and color every neuron. -```julia +````julia julia> nnr = Chain(Dense(2,5,σ),RNN(5,4,relu), LSTM(4,4), GRU(4,4), Dense(4,3)) Chain(Dense(2, 5, σ), Recur(RNNCell(5, 4, relu)), Recur(LSTMCell(4, 4)), Recur(GRUCell(4, 4)), Dense(4, 3)) @@ -222,7 +222,7 @@ julia> nodefillc = [parse(Colorant, get_prop(mg_nnr, v, :neuron_color)) for v in RGB{N0f8}(0.565,0.933,0.565) julia> draw(PNG("img/mg_nnr.png", 600, 400), gplot(mg_nnr, locs_x, locs_y, nodefillc=nodefillc)) -``` +```` And here is the result.