Deep Learning with Julia 1.0 and Flux

[Slides 1-17: no text extracted]

[Slide 18: prepare the MNIST dataset]
using Flux, Flux.Data.MNIST

function prepare_dataset(;train=true)
    train_or_test = ifelse(train, :train, :test)
    imgs = MNIST.images(train_or_test)
    # flatten each 28x28 image into a 784-element column and stack the columns
    X = hcat(float.(vec.(imgs))...)
    labels = MNIST.labels(train_or_test)
    Y = onehotbatch(labels, 0:9)
    return X, Y
end

X, Y = prepare_dataset(train=true)
# split_dataset_random is a helper not shown on the extracted slides; a sketch follows below
train_X, train_Y, val_X, val_Y = split_dataset_random(X, Y)
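A minimal sketch of what split_dataset_random might look like, assuming a simple random 90/10 column split (the ratio and implementation are assumptions, not from the slides):

using Random

function split_dataset_random(X, Y; val_ratio=0.1)
    n = size(X, 2)
    idx = shuffle(1:n)                      # random permutation of the column indices
    n_val = floor(Int, n * val_ratio)
    val_idx, train_idx = idx[1:n_val], idx[n_val+1:end]
    return X[:, train_idx], Y[:, train_idx], X[:, val_idx], Y[:, val_idx]
end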
[Slide 19: define the model]
using Flux: Chain, Dense
using NNlib: softmax, relu

function define_model()
    mlp = Chain(Dense(28^2, 100, relu),
                Dense(100, 100, relu),
                Dense(100, 10),
                softmax)
    return mlp
end

model = define_model()
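A quick shape check of the model (the random batch is only an illustration):

x = rand(Float32, 28^2, 5)   # a dummy batch of five flattened images
size(model(x))               # (10, 5): one softmax column per example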
[Slide 20: make mini-batches]
using Base.Iterators: partition

batchsize = 32
serial_iterator = partition(1:size(train_Y)[2], batchsize)
train_dataset = [(train_X[:, batch], train_Y[:, batch]) for batch in serial_iterator]

size(train_dataset[1][1]) # (784, 32)
size(train_dataset[1][2]) # (10, 32)
[Slide 21: train the model]
using Flux: onecold, crossentropy, @epochs
using Flux: ADAM

loss(x, y) = crossentropy(model(x), y)
optimizer = ADAM(params(model))
epochs = 10
@epochs epochs Flux.train!(loss, train_dataset, optimizer)
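Flux.train! also accepts a cb keyword, which is handy for watching the validation split made earlier; a minimal sketch using the throttled-callback idiom:

using Flux: throttle

# print the validation loss at most once every 10 seconds while training
evalcb = throttle(() -> @show(loss(val_X, val_Y)), 10)
@epochs epochs Flux.train!(loss, train_dataset, optimizer, cb = evalcb)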
[Slide 22: save the trained model]
using BSON: @load, @save

pretrained = model |> cpu
# strip the Tracker wrappers so plain arrays are saved
weights = Flux.Tracker.data.(params(pretrained))
@save "pretrained.bson" pretrained
@save "weights.bson" weights
[Slide 23: evaluate on the test set]
using Statistics: mean
using Flux: onecold

function predict()
    println("Start to evaluate testset")
    println("loading pretrained model")
    @load "pretrained.bson" pretrained
    model = pretrained
    accuracy(x, y) = mean(onecold(model(x)) .== onecold(y))
    println("prepare dataset")
    X, Y = prepare_dataset(train=false)
    @show accuracy(X, Y)
    println("Done")
end

predict()
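The weights saved in weights.bson can also be restored into a freshly built model; a short sketch using Flux.loadparams!:

@load "weights.bson" weights
model = define_model()
Flux.loadparams!(model, weights)   # copy the saved arrays into the new model's parameters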
[Slide 24: move model and data to the GPU]
using CuArrays

model = define_model() |> gpu
train_dataset = [(train_X[:, batch] |> gpu, train_Y[:, batch] |> gpu)
                 for batch in serial_iterator]
# or move the already-built batches in one go
train_dataset = gpu.(train_dataset)
[Slide 25: prepare CIFAR10 with Metalhead]
using Metalhead
using Images: channelview

X = Metalhead.trainimgs(CIFAR10)
batchsize = 16
# convert an RGB image to a (height, width, channel) Float64 array
getarray(im) = Float64.(permutedims(channelview(im), (2, 3, 1)))
imgs = [getarray(X[i].img) for i in 1:50000]
labels = onehotbatch([X[i].ground_truth.class for i in 1:50000], 1:10)
data = [(cat(imgs[batch]..., dims=4), labels[:, batch])
        for batch in partition(1:49000, batchsize)]
[Slides 26-28: no text extracted]

[Slide 29: fine-tuning a pretrained VGG19 from Metalhead]
using Flux, Metalhead
using Metalhead: classify

# VGG19 pretrained on ImageNet
vgg = VGG19()
classify(vgg, "elephant.jpeg")

# swap the ImageNet head for a 101-class classifier
model = Chain(vgg.layers[1:end-2],
              Dense(4096, 101),
              softmax)
Flux.testmode!(model)
# optimise only the last VGG layers and the new head
opt = ADAM(params(Chain(model[1][9:end], model[2:end])))
[Slide 30: a custom Dataset type with an image cache]
using Images  # load, RGB, Normed

struct Dataset
    len::Int
    data::Array{Tuple{String,Int64},1}
    augment::Bool
    image_cache::Dict{Int,Array{RGB{Normed{UInt8,8}},2}}
    use_cache::Bool
    function Dataset(data; train=true)
        augment = train
        use_cache = train
        image_cache = Dict{Int,Array{RGB{Normed{UInt8,8}},2}}()
        new(length(data), data, augment, image_cache, use_cache)
    end
end

function get_example(dataset::Dataset, i::Int)
    path, label = dataset.data[i]
    if dataset.use_cache && haskey(dataset.image_cache, i)
        img = dataset.image_cache[i]
    else
        img = load(path)
        dataset.image_cache[i] = img
    end
    img = copyimg(img)  # copyimg (and any augmentation) is a helper not shown on the extracted slides
    return img, label
end
[Slide 31: a SerialIterator over the Dataset]
using Random

struct SerialIterator
    len::Int
    get_example::Function
    batchsize::Int
    indices::Vector
    function SerialIterator(dataset::Dataset, batchsize::Int; shuffle=true)
        indices = Vector(1:dataset.len)
        if shuffle
            shuffle!(indices)
        end
        _get_example = i -> get_example(dataset, i)
        new(dataset.len, _get_example, batchsize, indices)
    end
end

function Base.iterate(diter::SerialIterator, state=(1, 0))
    idx_start, count = state
    if idx_start + diter.batchsize - 1 > diter.len
        # stop once a full batch no longer fits (any trailing partial batch is dropped)
        return nothing
    else
        indices = diter.indices[idx_start:idx_start + diter.batchsize - 1]
        element = diter.get_example.(indices)
        return (element, (idx_start + diter.batchsize, count + 1))
    end
end
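Putting the two together, a minimal usage sketch (the file list, labels, and batch-collating step are illustrative, not from the slides):

filelist = [("images/cat1.png", 1), ("images/dog1.png", 2),
            ("images/cat2.png", 1), ("images/dog2.png", 2)]
dataset = Dataset(filelist, train=true)

for batch in SerialIterator(dataset, 2)
    # batch is a Vector of (img, label) tuples; collate it into arrays here
    xs = cat((getarray(img) for (img, _) in batch)..., dims=4)
    ys = onehotbatch([label for (_, label) in batch], 1:2)
    # feed (xs, ys) to loss / Flux.train!
end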
[Slide 32]
# A joke, not real Flux layers: "muri" = impossible, "ganbareba" = if you try hard,
# "kanoudesu" = it is possible.
Chain(Flux.muri,
      Flux.ganbareba,
      Flux.kanoudesu,
      softmax)
[Slide 33] (the same Chain as slide 32, repeated)
[Slide 34: wrapping a Chain in a custom layer]
using Flux: @treelike

struct Zikiso
    layers::Chain
    activation_fn
end

function (ec::Zikiso)(x)
    h = ec.layers(x)
    ec.activation_fn.(h + x)   # residual connection around the wrapped Chain
end

function Zikiso()
    chain = Chain(Flux.muri,
                  Flux.ganbareba,
                  Flux.kanoudesu)
    Zikiso(chain, Flux.softmax)
end

@treelike Zikiso
[Slide 35: ExpandedConv, the MobileNetV2 building block]
using Flux
using Flux: Chain, Conv, BatchNorm, DepthwiseConv
using Flux: @treelike

# ReLU clipped at 6, as used in MobileNet
relu6(x) = min(max(zero(x), x), eltype(x)(6))

struct ExpandedConv
    layers::Chain
    stride::Int
end

function ExpandedConv(expand::Int, ch::Pair{<:Integer,<:Integer}; stride=1)
    inch = ch[1]
    outch = ch[2]
    expandedch = inch * expand
    if expand != 1
        chain = Chain(Conv((1,1), inch=>expandedch),
                      BatchNorm(expandedch, relu6),
                      DepthwiseConv((3,3), expandedch, relu6, stride=stride, pad=1),
                      BatchNorm(expandedch, relu6),
                      Conv((1,1), expandedch=>outch),
                      BatchNorm(outch))
    else
        chain = Chain(DepthwiseConv((3,3), expandedch, relu6, stride=stride, pad=1),
                      BatchNorm(expandedch, relu6),
                      Conv((1,1), expandedch=>outch),
                      BatchNorm(outch))
    end
    ExpandedConv(chain, stride)
end

@treelike ExpandedConv

function (ec::ExpandedConv)(x)
    h = ec.layers(x)
    if size(h) == size(x)
        relu6.(h + x)   # residual connection when the shapes match
    else
        relu6.(h)
    end
end

# The same pattern in outline: a struct wrapping a Chain, a constructor, and a forward pass.
struct ExpandedConv
    layers::Chain
    stride::Int
end
function ExpandedConv()
    # build and return the Chain here
end
function (ec::ExpandedConv)(x)
    h = ec.layers(x)
    if size(h) == size(x)
        relu6.(h + x)
    else
        relu6.(h)
    end
end
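A quick shape check of the residual behaviour (the random input is only an illustration, in Flux's WHCN layout):

block = ExpandedConv(6, 32=>32)           # stride 1, same channels: residual path is used
x = rand(28, 28, 32, 1)
size(block(x))                            # (28, 28, 32, 1)

down = ExpandedConv(6, 32=>64, stride=2)  # stride 2, more channels: no residual
size(down(x))                             # (14, 14, 64, 1)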
[Slide 36: assembling MobileNetV2]
struct MobileNetv2
    layers::Chain
end

mv2() = Chain(Conv((3,3), 3=>32, stride=2, pad=1),
              BatchNorm(32, relu6),
              ExpandedConv(1, 32=>16),
              ExpandedConv(6, 16=>24, stride=2),
              ExpandedConv(6, 24=>24),
              ExpandedConv(6, 24=>32, stride=2),
              ExpandedConv(6, 32=>32),
              ExpandedConv(6, 32=>32),
              ExpandedConv(6, 32=>64, stride=2),
              ExpandedConv(6, 64=>64),
              ExpandedConv(6, 64=>64),
              ExpandedConv(6, 64=>64),
              ExpandedConv(6, 64=>96),
              ExpandedConv(6, 96=>96),
              ExpandedConv(6, 96=>96),
              ExpandedConv(6, 96=>160, stride=2),
              ExpandedConv(6, 160=>160),
              ExpandedConv(6, 160=>160),
              ExpandedConv(6, 160=>320),
              Conv((1,1), 320=>120),
              BatchNorm(120, relu6),
              MeanPool((7,7)),
              x -> reshape(x, :, size(x, 4)),
              Dense(120, 101),
              softmax)

MobileNetv2() = MobileNetv2(mv2())
@treelike MobileNetv2
(mv2::MobileNetv2)(x) = mv2.layers(x)
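With a 224x224 RGB input the spatial size reaches 7x7 just before the MeanPool, so the head emits one 101-way distribution per image; a small sanity-check sketch (the random batch is only an illustration):

model = MobileNetv2()
x = rand(224, 224, 3, 2)   # a dummy batch of two 224x224 RGB images (WHCN layout)
size(model(x))             # (101, 2)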
[Slides 37-40: no text extracted]

[Slide 41: iterating over a number vs. a range]
some_number = 10
# A number is iterable as a single element, so this prints "10" exactly once:
for i in some_number
    println(i)
end
# To count from 1 to 10, iterate over a range:
for i in 1:some_number
    println(i)
end
[Slides 42-44: no text extracted]