Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 9664852

Browse files
author
Will Feng
committed
Revert "Make all tutorials run their normal length"
This reverts commit 4543f6a.
1 parent fe4cbcd commit 9664852

4 files changed

Lines changed: 8 additions & 4 deletions

File tree

beginner_source/audio_classifier_tutorial.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -281,7 +281,8 @@ def test(model, epoch):
281281
#
282282

283283
log_interval = 20
284-
for epoch in range(1, 41):
284+
# for epoch in range(1, 41): # yf225 TODO: change this back
285+
for epoch in range(1, 2):
285286
if epoch == 31:
286287
print("First round of training complete. Setting learn rate to 0.001.")
287288
scheduler.step()

beginner_source/blitz/cifar10_tutorial.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,8 @@ def forward(self, x):
155155
# We simply have to loop over our data iterator, and feed the inputs to the
156156
# network and optimize.
157157

158-
for epoch in range(2): # loop over the dataset multiple times
158+
# for epoch in range(2): # loop over the dataset multiple times
159+
for epoch in range(1): # yf225 TODO: change this back
159160

160161
running_loss = 0.0
161162
for i, data in enumerate(trainloader, 0):

beginner_source/dcgan_faces_tutorial.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -201,7 +201,8 @@
201201
ndf = 64
202202

203203
# Number of training epochs
204-
num_epochs = 5
204+
# num_epochs = 5
205+
num_epochs = 1 # yf225 TODO: DEBUG
205206

206207
# Learning rate for optimizers
207208
lr = 0.0002

intermediate_source/seq2seq_translation_tutorial.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -770,7 +770,8 @@ def evaluateRandomly(encoder, decoder, n=10):
770770
encoder1 = EncoderRNN(input_lang.n_words, hidden_size).to(device)
771771
attn_decoder1 = AttnDecoderRNN(hidden_size, output_lang.n_words, dropout_p=0.1).to(device)
772772

773-
trainIters(encoder1, attn_decoder1, 75000, print_every=5000)
773+
# trainIters(encoder1, attn_decoder1, 75000, print_every=5000)
774+
trainIters(encoder1, attn_decoder1, 1, print_every=5000) # yf225 TODO: DEBUG
774775

775776
######################################################################
776777
#

0 commit comments

Comments (0)