remove controversial println code, and make it shorter
mcabbott committed Nov 26, 2022
1 parent e4c306b commit 71dc45b
Showing 1 changed file with 1 addition and 4 deletions.
docs/src/models/quickstart.md: 1 addition & 4 deletions

@@ -31,8 +31,8 @@ pars = Flux.params(model) # contains references to arrays in model
 opt = Flux.Adam(0.01)  # will store optimiser momentum, etc.
 
 # Training loop, using the whole data set 1000 times:
+losses = []
 for epoch in 1:1_000
-    losses = []
     for (x, y) in loader
         loss, grad = withgradient(pars) do
             # Evaluate model and loss inside gradient context:
@@ -42,9 +42,6 @@ for epoch in 1:1_000
         Flux.update!(opt, pars, grad)
         push!(losses, loss)  # logging, outside gradient context
     end
-    if isinteger(log2(epoch))
-        println("after epoch $epoch, loss is ", mean(losses))
-    end
 end
 
 pars # parameters, momenta and output have all changed
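For reference, here is a minimal sketch of how the training loop reads after this change: `losses` is now created once before the epoch loop and accumulates every per-batch loss, so it can be inspected or plotted after training rather than printed during it. The toy data, model, and cross-entropy loss below are illustrative assumptions (the diff elides the body of `withgradient` and the surrounding setup); only the loop structure comes from this patch.

```julia
using Flux, Statistics

# Assumed toy setup, just so the loop below runs; not taken from the diff:
X = rand(Float32, 2, 1000)                       # 2 features × 1000 samples
Y = Flux.onehotbatch(rand(Bool, 1000), [true, false])
loader = Flux.DataLoader((X, Y), batchsize=64, shuffle=true)
model = Chain(Dense(2 => 3, tanh), Dense(3 => 2), softmax)

pars = Flux.params(model)  # contains references to arrays in model
opt = Flux.Adam(0.01)      # will store optimiser momentum, etc.

# Training loop, using the whole data set 1000 times:
losses = []                # created once, so it accumulates across all epochs
for epoch in 1:1_000
    for (x, y) in loader
        loss, grad = Flux.withgradient(pars) do
            # Evaluate model and loss inside gradient context (loss assumed):
            y_hat = model(x)
            Flux.crossentropy(y_hat, y)
        end
        Flux.update!(opt, pars, grad)
        push!(losses, loss)  # logging, outside gradient context
    end
end

mean(losses)  # e.g. summarise afterwards, instead of printing inside the loop
```

With the vector kept for the whole run, progress can still be checked afterwards (for example by plotting `losses`) without any printing inside the loop.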

0 comments on commit 71dc45b
