Skip to content

Commit 45c89be

Browse files
normanheckscher authored and hunkim committed
Refactor for TensorFlow 1.0 (#69)
* Refactor for TensorFlow 1.0. Tested with TensorFlow r1.0 alpha. Signed-off-by: Norman Heckscher <norman.heckscher@gmail.com>
* Update comments. Signed-off-by: Norman Heckscher <norman.heckscher@gmail.com>
* Close writer. Signed-off-by: Norman Heckscher <norman.heckscher@gmail.com>
* Refactor softmax arguments. Signed-off-by: Norman Heckscher <norman.heckscher@gmail.com>
1 parent c30061d commit 45c89be

23 files changed

+285
-1136
lines changed

.gitignore

+4-1
Original file line numberDiff line numberDiff line change
@@ -57,4 +57,7 @@ target/
5757
MNIST_data/
5858

5959
# OSX stuff
60-
.DS_Store
60+
.DS_Store
61+
/in_vis.png
62+
/word2vec.png
63+
/pred_vis.png

.travis.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,9 @@ install:
1111
- pip install matplotlib
1212
# install TensorFlow from https://storage.googleapis.com/tensorflow/
1313
- if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
14-
pip install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-0.9.0-cp27-none-linux_x86_64.whl;
14+
pip install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-1.0.0a0-cp27-none-linux_x86_64.whl;
1515
elif [[ "$TRAVIS_PYTHON_VERSION" == "3.4" ]]; then
16-
pip install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-0.9.0-cp34-cp34m-linux_x86_64.whl;
16+
pip install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-1.0.0a0-cp34-cp34m-linux_x86_64.whl;
1717
fi
1818
script:
1919
- sed -i -- 's/range(100)/range(1)/g' ??_*.py # change range to 1 for quick testing

01_linear_regression.ipynb

+15-6
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
"cell_type": "code",
55
"execution_count": 1,
66
"metadata": {
7-
"collapsed": true
7+
"collapsed": false
88
},
99
"outputs": [],
1010
"source": [
@@ -48,7 +48,7 @@
4848
},
4949
{
5050
"cell_type": "code",
51-
"execution_count": 4,
51+
"execution_count": 5,
5252
"metadata": {
5353
"collapsed": false
5454
},
@@ -57,22 +57,31 @@
5757
"name": "stdout",
5858
"output_type": "stream",
5959
"text": [
60-
"2.00683\n"
60+
"2.00863\n"
6161
]
6262
}
6363
],
6464
"source": [
6565
"# Launch the graph in a session\n",
6666
"with tf.Session() as sess:\n",
6767
" # you need to initialize variables (in this case just variable W)\n",
68-
" tf.initialize_all_variables().run()\n",
68+
" tf.global_variables_initializer().run()\n",
6969
"\n",
7070
" for i in range(100):\n",
7171
" for (x, y) in zip(trX, trY):\n",
7272
" sess.run(train_op, feed_dict={X: x, Y: y})\n",
7373
"\n",
7474
" print(sess.run(w)) # It should be something around 2"
7575
]
76+
},
77+
{
78+
"cell_type": "code",
79+
"execution_count": null,
80+
"metadata": {
81+
"collapsed": true
82+
},
83+
"outputs": [],
84+
"source": []
7685
}
7786
],
7887
"metadata": {
@@ -91,9 +100,9 @@
91100
"name": "python",
92101
"nbconvert_exporter": "python",
93102
"pygments_lexer": "ipython2",
94-
"version": "2.7.6"
103+
"version": "2.7.13"
95104
}
96105
},
97106
"nbformat": 4,
98-
"nbformat_minor": 1
107+
"nbformat_minor": 0
99108
}

01_linear_regression.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def model(X, w):
2424
# Launch the graph in a session
2525
with tf.Session() as sess:
2626
# you need to initialize variables (in this case just variable W)
27-
tf.initialize_all_variables().run()
27+
tf.global_variables_initializer().run()
2828

2929
for i in range(100):
3030
for (x, y) in zip(trX, trY):

02_logistic_regression.ipynb

+21-130
Original file line numberDiff line numberDiff line change
@@ -15,22 +15,11 @@
1515
},
1616
{
1717
"cell_type": "code",
18-
"execution_count": 2,
18+
"execution_count": null,
1919
"metadata": {
2020
"collapsed": false
2121
},
22-
"outputs": [
23-
{
24-
"name": "stdout",
25-
"output_type": "stream",
26-
"text": [
27-
"Extracting MNIST_data/train-images-idx3-ubyte.gz\n",
28-
"Extracting MNIST_data/train-labels-idx1-ubyte.gz\n",
29-
"Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n",
30-
"Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n"
31-
]
32-
}
33-
],
22+
"outputs": [],
3423
"source": [
3524
"def init_weights(shape):\n",
3625
" return tf.Variable(tf.random_normal(shape, stddev=0.01))\n",
@@ -46,7 +35,7 @@
4635
"cell_type": "code",
4736
"execution_count": 3,
4837
"metadata": {
49-
"collapsed": true
38+
"collapsed": false
5039
},
5140
"outputs": [],
5241
"source": [
@@ -57,156 +46,58 @@
5746
"\n",
5847
"py_x = model(X, w)\n",
5948
"\n",
60-
"cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(py_x, Y)) # compute mean cross entropy (softmax is applied internally)\n",
49+
"cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) # compute mean cross entropy (softmax is applied internally)\n",
6150
"train_op = tf.train.GradientDescentOptimizer(0.05).minimize(cost) # construct optimizer\n",
6251
"predict_op = tf.argmax(py_x, 1) # at predict time, evaluate the argmax of the logistic regression"
6352
]
6453
},
6554
{
6655
"cell_type": "code",
67-
"execution_count": 4,
56+
"execution_count": null,
6857
"metadata": {
6958
"collapsed": false
7059
},
71-
"outputs": [
72-
{
73-
"name": "stdout",
74-
"output_type": "stream",
75-
"text": [
76-
"0 0.8842\n",
77-
"1 0.8969\n",
78-
"2 0.9031\n",
79-
"3 0.9076\n",
80-
"4 0.9102\n",
81-
"5 0.9118\n",
82-
"6 0.912\n",
83-
"7 0.9132\n",
84-
"8 0.9145\n",
85-
"9 0.9157\n",
86-
"10 0.9164\n",
87-
"11 0.9165\n",
88-
"12 0.917\n",
89-
"13 0.9175\n",
90-
"14 0.9174\n",
91-
"15 0.9178\n",
92-
"16 0.9184\n",
93-
"17 0.9192\n",
94-
"18 0.9193\n",
95-
"19 0.9198\n",
96-
"20 0.9197\n",
97-
"21 0.92\n",
98-
"22 0.9203\n",
99-
"23 0.9205\n",
100-
"24 0.9207\n",
101-
"25 0.9211\n",
102-
"26 0.9208\n",
103-
"27 0.921\n",
104-
"28 0.921\n",
105-
"29 0.9212\n",
106-
"30 0.9212\n",
107-
"31 0.9217\n",
108-
"32 0.9215\n",
109-
"33 0.9213\n",
110-
"34 0.9215\n",
111-
"35 0.9214\n",
112-
"36 0.9213\n",
113-
"37 0.9218\n",
114-
"38 0.9217\n",
115-
"39 0.9215\n",
116-
"40 0.9216\n",
117-
"41 0.9219\n",
118-
"42 0.9222\n",
119-
"43 0.9225\n",
120-
"44 0.9226\n",
121-
"45 0.9225\n",
122-
"46 0.9225\n",
123-
"47 0.9225\n",
124-
"48 0.9225\n",
125-
"49 0.9226\n",
126-
"50 0.9225\n",
127-
"51 0.9226\n",
128-
"52 0.9225\n",
129-
"53 0.9224\n",
130-
"54 0.9223\n",
131-
"55 0.9225\n",
132-
"56 0.9225\n",
133-
"57 0.9225\n",
134-
"58 0.9228\n",
135-
"59 0.9229\n",
136-
"60 0.9229\n",
137-
"61 0.923\n",
138-
"62 0.9231\n",
139-
"63 0.9233\n",
140-
"64 0.9234\n",
141-
"65 0.9236\n",
142-
"66 0.9236\n",
143-
"67 0.9236\n",
144-
"68 0.9237\n",
145-
"69 0.9235\n",
146-
"70 0.9235\n",
147-
"71 0.9235\n",
148-
"72 0.9235\n",
149-
"73 0.9235\n",
150-
"74 0.9236\n",
151-
"75 0.9236\n",
152-
"76 0.9235\n",
153-
"77 0.9235\n",
154-
"78 0.9236\n",
155-
"79 0.9238\n",
156-
"80 0.9239\n",
157-
"81 0.9238\n",
158-
"82 0.9238\n",
159-
"83 0.9238\n",
160-
"84 0.9236\n",
161-
"85 0.9236\n",
162-
"86 0.9236\n",
163-
"87 0.9238\n",
164-
"88 0.9238\n",
165-
"89 0.9238\n",
166-
"90 0.9237\n",
167-
"91 0.9236\n",
168-
"92 0.9235\n",
169-
"93 0.9235\n",
170-
"94 0.9237\n",
171-
"95 0.9237\n",
172-
"96 0.9237\n",
173-
"97 0.9237\n",
174-
"98 0.9237\n",
175-
"99 0.9237\n"
176-
]
177-
}
178-
],
60+
"outputs": [],
17961
"source": [
18062
"# Launch the graph in a session\n",
18163
"with tf.Session() as sess:\n",
18264
" # you need to initialize all variables\n",
183-
" tf.initialize_all_variables().run()\n",
65+
" tf.global_variables_initializer().run()\n",
18466
"\n",
18567
" for i in range(100):\n",
18668
" for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)):\n",
18769
" sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end]})\n",
18870
" print(i, np.mean(np.argmax(teY, axis=1) ==\n",
18971
" sess.run(predict_op, feed_dict={X: teX})))"
19072
]
73+
},
74+
{
75+
"cell_type": "code",
76+
"execution_count": null,
77+
"metadata": {
78+
"collapsed": true
79+
},
80+
"outputs": [],
81+
"source": []
19182
}
19283
],
19384
"metadata": {
19485
"kernelspec": {
195-
"display_name": "Python 3",
86+
"display_name": "Python 2",
19687
"language": "python",
197-
"name": "python3"
88+
"name": "python2"
19889
},
19990
"language_info": {
20091
"codemirror_mode": {
20192
"name": "ipython",
202-
"version": 3
93+
"version": 2
20394
},
20495
"file_extension": ".py",
20596
"mimetype": "text/x-python",
20697
"name": "python",
20798
"nbconvert_exporter": "python",
208-
"pygments_lexer": "ipython3",
209-
"version": "3.5.2"
99+
"pygments_lexer": "ipython2",
100+
"version": "2.7.13"
210101
}
211102
},
212103
"nbformat": 4,

02_logistic_regression.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -23,14 +23,14 @@ def model(X, w):
2323

2424
py_x = model(X, w)
2525

26-
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(py_x, Y)) # compute mean cross entropy (softmax is applied internally)
26+
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) # compute mean cross entropy (softmax is applied internally)
2727
train_op = tf.train.GradientDescentOptimizer(0.05).minimize(cost) # construct optimizer
2828
predict_op = tf.argmax(py_x, 1) # at predict time, evaluate the argmax of the logistic regression
2929

3030
# Launch the graph in a session
3131
with tf.Session() as sess:
3232
# you need to initialize all variables
33-
tf.initialize_all_variables().run()
33+
tf.global_variables_initializer().run()
3434

3535
for i in range(100):
3636
for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)):

0 commit comments

Comments (0)