Commit 392b813
Author: Dan Moldovan

Update the training schedule for better convergence. The current hyperparameters tend to diverge in Colab.

1 parent: a141d02

File tree

1 file changed (+15, -4)

samples/core/guide/autograph.ipynb

Lines changed: 15 additions & 4 deletions
@@ -10,14 +10,25 @@
     "collapsed_sections": [
       "Jxv6goXm7oGF"
     ],
-    "toc_visible": true
+    "toc_visible": true,
+    "include_colab_link": true
   },
   "kernelspec": {
     "name": "python3",
     "display_name": "Python 3"
   }
 },
 "cells": [
+  {
+    "cell_type": "markdown",
+    "metadata": {
+      "id": "view-in-github",
+      "colab_type": "text"
+    },
+    "source": [
+      "[View in Colaboratory](https://colab.research.google.com/github/mdanatg/models/blob/master/samples/core/guide/autograph.ipynb)"
+    ]
+  },
   {
     "metadata": {
       "id": "Jxv6goXm7oGF",
@@ -740,7 +751,7 @@
 "@autograph.convert(recursive=True)\n",
 "def train(train_ds, test_ds, hp):\n",
 "  m = mlp_model((28 * 28,))\n",
-"  opt = tf.train.MomentumOptimizer(hp.learning_rate, 0.9)\n",
+"  opt = tf.train.AdamOptimizer(hp.learning_rate)\n",
 "  \n",
 "  # We'd like to save our losses to a list. In order for AutoGraph\n",
 "  # to convert these lists into their graph equivalent,\n",
@@ -802,7 +813,7 @@
 "source": [
 "with tf.Graph().as_default() as g:\n",
 "  hp = tf.contrib.training.HParams(\n",
-"      learning_rate=0.05,\n",
+"      learning_rate=0.005,\n",
 "      max_steps=500,\n",
 "  )\n",
 "  train_ds = setup_mnist_data(True, 50)\n",
@@ -837,4 +848,4 @@
 "outputs": []
 }
 ]
-}
+}
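
In effect, this commit makes two changes to the notebook's training hyperparameters: the optimizer switches from momentum SGD to Adam, and the learning rate drops from 0.05 to 0.005. Below is a minimal before/after sketch of just the optimizer setup, assuming a TensorFlow 1.x environment (the tf.train optimizer classes the notebook uses are not available in TF 2.x):

    import tensorflow as tf  # TF 1.x assumed; these classes live under tf.train

    # Before this commit: momentum SGD with a relatively large step size.
    # Per the commit message, these hyperparameters tend to diverge in Colab.
    opt_before = tf.train.MomentumOptimizer(learning_rate=0.05, momentum=0.9)

    # After this commit: Adam with a 10x smaller learning rate. Adam adapts
    # the effective step size per parameter, which generally makes training
    # less sensitive to the exact learning rate chosen.
    opt_after = tf.train.AdamOptimizer(learning_rate=0.005)

In the notebook itself, the learning rate comes from hp.learning_rate (an HParams object), so only its 0.005 value changes in the second hunk; the optimizer swap happens inside the AutoGraph-converted train function shown in the first code hunk above.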
