{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "using Tensorflow;"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "using static Tensorflow.Python;"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "using PlotNET;"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "using NumSharp;"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "int training_epochs = 1000;\n",
    "\n",
    "// Parameters\n",
    "float learning_rate = 0.01f;\n",
    "int display_step = 50;\n",
    "\n",
    "NumPyRandom rng = np.random;\n",
    "NDArray train_X, train_Y;\n",
    "int n_samples;"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "train_X = np.array(3.3f, 4.4f, 5.5f, 6.71f, 6.93f, 4.168f, 9.779f, 6.182f, 7.59f, 2.167f,\n",
    "                   7.042f, 10.791f, 5.313f, 7.997f, 5.654f, 9.27f, 3.1f);\n",
    "train_Y = np.array(1.7f, 2.76f, 2.09f, 3.19f, 1.694f, 1.573f, 3.366f, 2.596f, 2.53f, 1.221f,\n",
    "                   2.827f, 3.465f, 1.65f, 2.904f, 2.42f, 2.94f, 1.3f);\n",
    "n_samples = train_X.shape[0];\n",
    "\n",
    "// tf Graph Input\n",
    "var X = tf.placeholder(tf.float32);\n",
    "var Y = tf.placeholder(tf.float32);\n",
    "\n",
    "// Set model weights \n",
    "// We can set a fixed init value in order to debug\n",
    "// var rnd1 = rng.randn<float>();\n",
    "// var rnd2 = rng.randn<float>();\n",
    "var W = tf.Variable(-0.06f, name: \"weight\");\n",
    "var b = tf.Variable(-0.73f, name: \"bias\");\n",
    "\n",
    "// Construct a linear model\n",
    "var pred = tf.add(tf.multiply(X, W), b);\n",
    "\n",
    "// Mean squared error\n",
    "var cost = tf.reduce_sum(tf.pow(pred - Y, 2.0f)) / (2.0f * n_samples);\n",
    "\n",
    "// Gradient descent\n",
    "// Note, minimize() knows to modify W and b because Variable objects are trainable=True by default\n",
    "var optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost);\n",
    "\n",
    "// Initialize the variables (i.e. assign their default value)\n",
    "var init = tf.global_variables_initializer();\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "// Start training\n",
    "using (var sess = tf.Session())\n",
    "{\n",
    "    // Run the initializer\n",
    "    sess.run(init);\n",
    "\n",
    "    // Fit all training data\n",
    "    for (int epoch = 0; epoch < training_epochs; epoch++)\n",
    "    {\n",
    "        foreach (var (x, y) in zip<float>(train_X, train_Y))\n",
    "        {\n",
    "            sess.run(optimizer, \n",
    "                new FeedItem(X, x),\n",
    "                new FeedItem(Y, y));\n",
    "        }\n",
    "\n",
    "        // Display logs per epoch step\n",
    "        if ((epoch + 1) % display_step == 0)\n",
    "        {\n",
    "            var c = sess.run(cost, \n",
    "                new FeedItem(X, train_X),\n",
    "                new FeedItem(Y, train_Y));\n",
    "            Console.WriteLine($\"Epoch: {epoch + 1} cost={c} \" + $\"W={sess.run(W)} b={sess.run(b)}\");\n",
    "        }\n",
    "    }\n",
    "\n",
    "    Console.WriteLine(\"Optimization Finished!\");\n",
    "\n",
    "    var training_cost = sess.run(cost,\n",
    "        new FeedItem(X, train_X),\n",
    "        new FeedItem(Y, train_Y));\n",
    "\n",
    "    var plotter = new Plotter();\n",
    "\n",
    "    plotter.Plot(\n",
    "        train_X,\n",
    "        train_Y,\n",
    "        \"Original data\", ChartType.Scatter,\"markers\");\n",
    "    plotter.Plot(\n",
    "        train_X,\n",
    "        sess.run(W) * train_X + sess.run(b),\n",
    "        \"Fitted line\", ChartType.Scatter, \"Fitted line\");\n",
    "\n",
    "    plotter.Show();\n",
    "\n",
    "    // Testing example\n",
    "    var test_X = np.array(6.83f, 4.668f, 8.9f, 7.91f, 5.7f, 8.7f, 3.1f, 2.1f);\n",
    "    var test_Y = np.array(1.84f, 2.273f, 3.2f, 2.831f, 2.92f, 3.24f, 1.35f, 1.03f);\n",
    "\n",
    "    Console.WriteLine(\"Testing... (Mean square loss Comparison)\");\n",
    "\n",
    "    var testing_cost = sess.run(tf.reduce_sum(tf.pow(pred - Y, 2.0f)) / (2.0f * test_X.shape[0]),\n",
    "        new FeedItem(X, test_X), \n",
    "        new FeedItem(Y, test_Y));\n",
    "\n",
    "    Console.WriteLine($\"Testing cost={testing_cost}\");\n",
    "\n",
    "    var diff = Math.Abs((float)training_cost - (float)testing_cost);\n",
    "    Console.WriteLine($\"Absolute mean square loss difference: {diff}\");\n",
    "\n",
    "    plotter.Plot(\n",
    "        test_X,\n",
    "        test_Y,\n",
    "        \"Testing data\", ChartType.Scatter, \"markers\");\n",
    "    plotter.Plot(\n",
    "        train_X,\n",
    "        sess.run(W) * train_X + sess.run(b),\n",
    "        \"Fitted line\", ChartType.Scatter);\n",
    "\n",
    "    plotter.Show();\n",
    "\n",
    "    return diff < 0.01;\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "SciSharp Cube",
   "language": "csharp",
   "name": "csharpcore"
  },
  "language_info": {
   "file_extension": ".cs",
   "mimetype": "text/x-csharp",
   "name": ".netstandard",
   "pygments_lexer": "CSharp",
   "version": "4.0.30319"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}