Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 4543f6a

Browse files
author
Will Feng
committed
Make all tutorials run their normal length
1 parent 97ae30b commit 4543f6a

4 files changed

Lines changed: 4 additions & 8 deletions

File tree

beginner_source/audio_classifier_tutorial.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -281,8 +281,7 @@ def test(model, epoch):
281281
#
282282

283283
log_interval = 20
284-
# for epoch in range(1, 41): # yf225 TODO: change this back
285-
for epoch in range(1, 2):
284+
for epoch in range(1, 41):
286285
if epoch == 31:
287286
print("First round of training complete. Setting learn rate to 0.001.")
288287
scheduler.step()

beginner_source/blitz/cifar10_tutorial.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -155,8 +155,7 @@ def forward(self, x):
155155
# We simply have to loop over our data iterator, and feed the inputs to the
156156
# network and optimize.
157157

158-
# for epoch in range(2): # loop over the dataset multiple times
159-
for epoch in range(1): # yf225 TODO: change this back
158+
for epoch in range(2): # loop over the dataset multiple times
160159

161160
running_loss = 0.0
162161
for i, data in enumerate(trainloader, 0):

beginner_source/dcgan_faces_tutorial.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -201,8 +201,7 @@
201201
ndf = 64
202202

203203
# Number of training epochs
204-
# num_epochs = 5
205-
num_epochs = 1 # yf225 TODO: DEBUG
204+
num_epochs = 5
206205

207206
# Learning rate for optimizers
208207
lr = 0.0002

intermediate_source/seq2seq_translation_tutorial.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -770,8 +770,7 @@ def evaluateRandomly(encoder, decoder, n=10):
770770
encoder1 = EncoderRNN(input_lang.n_words, hidden_size).to(device)
771771
attn_decoder1 = AttnDecoderRNN(hidden_size, output_lang.n_words, dropout_p=0.1).to(device)
772772

773-
# trainIters(encoder1, attn_decoder1, 75000, print_every=5000)
774-
trainIters(encoder1, attn_decoder1, 1, print_every=5000) # yf225 TODO: DEBUG
773+
trainIters(encoder1, attn_decoder1, 75000, print_every=5000)
775774

776775
######################################################################
777776
#

0 commit comments

Comments
 (0)