-
Notifications
You must be signed in to change notification settings - Fork 2
/
multilayer_perceptron.py.patch
40 lines (34 loc) · 1.32 KB
/
multilayer_perceptron.py.patch
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
--- multilayer_perceptron.py 2019-02-18 17:25:10.000000000 +0000
+++ example-nn.py 2019-02-18 17:33:24.000000000 +0000
@@ -22,7 +22,7 @@
# Import MNIST data
from tensorflow.examples.tutorials.mnist import input_data
-mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
+mnist = input_data.read_data_sets("/home/a.cos/neuralnet_data/", one_hot=True)
import tensorflow as tf
@@ -65,16 +65,19 @@
out_layer = tf.matmul(layer_2, weights['out']) + biases['out']
return out_layer
-# Construct model
-logits = multilayer_perceptron(X)
-# Define loss and optimizer
-loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
- logits=logits, labels=Y))
-optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
-train_op = optimizer.minimize(loss_op)
-# Initializing the variables
-init = tf.global_variables_initializer()
+# Set up computation on GPU 0
+with tf.device('/gpu:0'):
+
+ # Construct model
+ logits = multilayer_perceptron(X)
+
+ # Define loss and optimizer
+ loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=Y))
+ optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
+ train_op = optimizer.minimize(loss_op)
+ # Initializing the variables
+ init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)