
Example 2 - Linear Regression Example with Python and theano from MSDN

Series

Code example

#!/usr/bin/python
# -*- coding: utf-8 -*-

# https://blogs.msdn.microsoft.com/lukassteindl/2015/12/13/linear-regression-example-with-python-and-theano/
# http://jakevdp.github.io/blog/2013/05/12/embedding-matplotlib-animations/

"""
在Python Notebook中无法展现动画,只能展示静态的图像。
The problem is that so far the integration of IPython with matplotlib is entirely static,
while animations are by their nature dynamic.
"""

import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib import style
import theano
from theano import tensor as T
import numpy as np

style.use('fivethirtyeight')
fig = plt.figure()
ax1 = fig.add_subplot(1,1,1)

trX = np.linspace(-1, 1, 101)                        # training inputs, shape (101,)
trY = 2 * trX + np.random.randn(*trX.shape) * 0.33   # noisy targets: y = 2x + noise, shape (101,)

X = T.scalar()
Y = T.scalar()

def model(X, w):
    return X * w

w = theano.shared(np.asarray(0., dtype=theano.config.floatX))  # shared scalar weight, initialised to 0
y = model(X, w)

cost = T.mean(T.sqr(y - Y))            # squared error for a single (x, y) sample
gradient = T.grad(cost=cost, wrt=w)    # symbolic derivative d(cost)/dw
updates = [[w, w - gradient * 0.001]]  # gradient-descent step with learning rate 0.001

# The commented variant adds allow_input_downcast=True, which lets float64 inputs
# be cast down automatically when theano.config.floatX is float32.
#train = theano.function(inputs=[X, Y], outputs=cost, updates=updates, allow_input_downcast=True)
train = theano.function(inputs=[X, Y], outputs=cost, updates=updates)

def run():
    for i in range(100):                 # 100 epochs over the training set
        for x, y in zip(trX, trY):
            train(x, y)                  # one SGD step per sample
    print(w.eval())                      # learned weight, should be close to 2

def animate(i):
    # i: frame index supplied by FuncAnimation; each frame runs one epoch of training
    ax1.clear()
    plt.scatter(trX, trY, label='Gradient Descent on GPU',
                alpha=0.3, edgecolors='none')
    plt.legend()
    plt.grid(True)
    for x, y in zip(trX, trY):
        train(x, y)
    #print (w.eval())

    xs = [-1, 1]
    ys = [-1 * w.eval(), w.eval()]       # endpoints of the fitted line y = w*x
    ax1.plot(xs, ys)

def show_animate():
    ani = animation.FuncAnimation(fig, animate, interval=250)
    plt.show()

def main():
    #run()
    show_animate()

main()
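
For comparison, here is a minimal NumPy-only sketch (not part of the MSDN post) of the same update rule. It makes explicit what the Theano graph computes: the per-sample cost (w*x - y)**2, its gradient 2*x*(w*x - y) with respect to w, and the step w <- w - 0.001 * gradient.

import numpy as np

def sgd_numpy(trX, trY, lr=0.001, epochs=100):
    # plain stochastic gradient descent on the per-sample cost (w*x - y)**2
    w = 0.0
    for _ in range(epochs):
        for x, y in zip(trX, trY):
            grad = 2.0 * x * (w * x - y)   # d/dw of (w*x - y)**2
            w -= lr * grad
    return w

# With trY = 2*trX + noise the result should be close to 2.
trX = np.linspace(-1, 1, 101)
trY = 2 * trX + np.random.randn(*trX.shape) * 0.33
print(sgd_numpy(trX, trY))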
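
Regarding the docstring's remark that animations do not display in a notebook: one workaround (a sketch, not from the original post, assuming matplotlib >= 2.1 so that Animation.to_jshtml is available) is to convert the animation to an HTML/JavaScript player instead of calling plt.show():

from IPython.display import HTML

def show_animate_notebook(frames=50):
    # reuses fig and animate from the listing above
    ani = animation.FuncAnimation(fig, animate, frames=frames, interval=250)
    return HTML(ani.to_jshtml())   # embeds a JS-based player in the notebook output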

Reference

  • https://blogs.msdn.microsoft.com/lukassteindl/2015/12/13/linear-regression-example-with-python-and-theano/
  • http://jakevdp.github.io/blog/2013/05/12/embedding-matplotlib-animations/

History

  • 20180807: created.