Skip to content

Commit 80ca1d9

Browse files
Merge pull request #2 from imamdigmi/master
Fix error on TensorFlow 1.12 deprecated function
2 parents 0850376 + a43af79 commit 80ca1d9

File tree

2 files changed

+14
-6
lines changed

2 files changed

+14
-6
lines changed

.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
.ipynb_checkpoints/
2+
traffic-signs-data/*.p

Traffic_Sign_Classifier.ipynb

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,8 @@
4242
"#### Environment:\n",
4343
"- Ubuntu 16.04\n",
4444
"- Anaconda 5.0.1\n",
45-
"- Python 3.6.2\n",
46-
"- TensorFlow 0.12.1 (GPU support)"
45+
"- Python 3.6.6\n",
46+
"- TensorFlow 1.12.0 (GPU support)"
4747
]
4848
},
4949
{
@@ -65,7 +65,13 @@
6565
"import os\n",
6666
"import tensorflow as tf\n",
6767
"from tensorflow.contrib.layers import flatten\n",
68-
"from sklearn.metrics import confusion_matrix"
68+
"from sklearn.metrics import confusion_matrix\n",
69+
"\n",
70+
"# is it using the GPU?\n",
71+
"print(tf.test.gpu_device_name())\n",
72+
"\n",
73+
"# Show current TensorFlow version\n",
74+
"tf.__version__"
6975
]
7076
},
7177
{
@@ -696,7 +702,7 @@
696702
"\n",
697703
" # Training operation\n",
698704
" self.one_hot_y = tf.one_hot(y, n_out)\n",
699-
" self.cross_entropy = tf.nn.softmax_cross_entropy_with_logits(self.logits, self.one_hot_y)\n",
705+
" self.cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(logits=self.logits, labels=self.one_hot_y)\n",
700706
" self.loss_operation = tf.reduce_mean(self.cross_entropy)\n",
701707
" self.optimizer = tf.train.AdamOptimizer(learning_rate = learning_rate)\n",
702708
" self.training_operation = self.optimizer.minimize(self.loss_operation)\n",
@@ -900,7 +906,7 @@
900906
"\n",
901907
" # Training operation\n",
902908
" self.one_hot_y = tf.one_hot(y, n_out)\n",
903-
" self.cross_entropy = tf.nn.softmax_cross_entropy_with_logits(self.logits, self.one_hot_y)\n",
909+
" self.cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(logits=self.logits, labels=self.one_hot_y)\n",
904910
" self.loss_operation = tf.reduce_mean(self.cross_entropy)\n",
905911
" self.optimizer = tf.train.AdamOptimizer(learning_rate = learning_rate)\n",
906912
" self.training_operation = self.optimizer.minimize(self.loss_operation)\n",
@@ -1491,7 +1497,7 @@
14911497
"name": "python",
14921498
"nbconvert_exporter": "python",
14931499
"pygments_lexer": "ipython3",
1494-
"version": "3.5.2"
1500+
"version": "3.6.6"
14951501
},
14961502
"widgets": {
14971503
"state": {},

0 commit comments

Comments (0)