```python
import tensorflow as tf

# A tf.Variable holds mutable state and must be initialized before use
x = tf.Variable(5)
init = tf.global_variables_initializer()

with tf.Session() as sess:
    sess.run(init)
```
```python
n_features = 120
n_labels = 5

# Initialize weights from a truncated normal distribution, biases to zero
weights = tf.Variable(tf.truncated_normal((n_features, n_labels)))
bias = tf.Variable(tf.zeros(n_labels))  # [0. 0. 0. 0. 0.]
```
```python
# Turn logits into probabilities that sum to 1
x = tf.nn.softmax([2.0, 1.0, 0.2])
```
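Evaluated in a session, the softmax maps the logits to a probability distribution; a minimal sketch (the printed values are approximate):

```python
import tensorflow as tf

softmax = tf.nn.softmax([2.0, 1.0, 0.2])

with tf.Session() as sess:
    # Roughly [0.65, 0.24, 0.11]; the entries sum to 1
    print(sess.run(softmax))
```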
```python
import numpy as np
from sklearn import preprocessing

# One-hot encode the labels with scikit-learn's LabelBinarizer
labels = np.array([1, 5, 3, 2, 1, 4, 2, 1, 3])
lb = preprocessing.LabelBinarizer()
lb.fit(labels)
lb.transform(labels)
```

```
array([[1, 0, 0, 0, 0],
       [0, 0, 0, 0, 1],
       [0, 0, 1, 0, 0],
       [0, 1, 0, 0, 0],
       [1, 0, 0, 0, 0],
       [0, 0, 0, 1, 0],
       [0, 1, 0, 0, 0],
       [1, 0, 0, 0, 0],
       [0, 0, 1, 0, 0]])
```
```python
# Hidden layer with ReLU activation function; features, hidden_weights,
# hidden_biases, output_weights, and output_biases are assumed to be
# defined elsewhere
hidden_layer = tf.add(tf.matmul(features, hidden_weights), hidden_biases)
hidden_layer = tf.nn.relu(hidden_layer)
output = tf.add(tf.matmul(hidden_layer, output_weights), output_biases)
```
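A self-contained version of the same two-layer forward pass; the concrete shapes and values here are illustrative assumptions, not from the original notes:

```python
import tensorflow as tf

# Hypothetical toy dimensions: 2 samples, 3 features, 4 hidden units, 2 outputs
features = tf.constant([[1.0, 2.0, 3.0],
                        [4.0, 5.0, 6.0]])
hidden_weights = tf.Variable(tf.truncated_normal([3, 4]))
hidden_biases = tf.Variable(tf.zeros(4))
output_weights = tf.Variable(tf.truncated_normal([4, 2]))
output_biases = tf.Variable(tf.zeros(2))

hidden_layer = tf.nn.relu(tf.add(tf.matmul(features, hidden_weights), hidden_biases))
output = tf.add(tf.matmul(hidden_layer, output_weights), output_biases)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(output))  # shape (2, 2)
```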
### Saving Variables
```python
import tensorflow as tf

save_file = './model.ckpt'

# Two Variables: weights and bias
weights = tf.Variable(tf.truncated_normal([2, 3]))
bias = tf.Variable(tf.truncated_normal([3]))

# Class used to save and/or restore Tensor Variables
saver = tf.train.Saver()

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    # Show the values of weights and bias
    print('Weights:')
    print(sess.run(weights))
    print('Bias:')
    print(sess.run(bias))

    # Save the model
    saver.save(sess, save_file)
```
### Loading Variables
```python
# Remove the previous weights and bias
tf.reset_default_graph()

# Two Variables: weights and bias
weights = tf.Variable(tf.truncated_normal([2, 3]))
bias = tf.Variable(tf.truncated_normal([3]))

# Class used to save and/or restore Tensor Variables
saver = tf.train.Saver()

with tf.Session() as sess:
    # Load the weights and bias
    saver.restore(sess, save_file)

    # Show the values of weights and bias
    print('Weights:')
    print(sess.run(weights))
    print('Bias:')
    print(sess.run(bias))
```
### Dropout
```python
keep_prob = tf.placeholder(tf.float32)  # probability to keep units

hidden_layer = tf.add(tf.matmul(features, weights[0]), biases[0])
hidden_layer = tf.nn.relu(hidden_layer)
hidden_layer = tf.nn.dropout(hidden_layer, keep_prob)

logits = tf.add(tf.matmul(hidden_layer, weights[1]), biases[1])
```
- `tf.nn.dropout()` takes two parameters:
  1. `hidden_layer`: the tensor to which you would like **to apply dropout**
  2. `keep_prob`: **the probability of keeping (i.e. not dropping) any given unit** (keep_prob lets you adjust the proportion of units to drop)
- During **training**, a good starting value for keep_prob is 0.5.
- During **testing**, use a keep_prob value of 1.0 to keep all units and maximize the power of the model, as sketched below.
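A minimal sketch of how keep_prob is typically fed at train versus test time; the placeholder and feed values follow the snippet above, while the shapes and input batch are hypothetical:

```python
import tensorflow as tf

# Assumed setup matching the dropout snippet above
features = tf.placeholder(tf.float32, [None, 4])
keep_prob = tf.placeholder(tf.float32)

weights = [tf.Variable(tf.truncated_normal([4, 3])),
           tf.Variable(tf.truncated_normal([3, 2]))]
biases = [tf.Variable(tf.zeros(3)), tf.Variable(tf.zeros(2))]

hidden_layer = tf.nn.relu(tf.add(tf.matmul(features, weights[0]), biases[0]))
hidden_layer = tf.nn.dropout(hidden_layer, keep_prob)
logits = tf.add(tf.matmul(hidden_layer, weights[1]), biases[1])

batch = [[1.0, 2.0, 3.0, 4.0]]
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Training: drop roughly half the hidden units
    train_logits = sess.run(logits, feed_dict={features: batch, keep_prob: 0.5})
    # Testing: keep every unit
    test_logits = sess.run(logits, feed_dict={features: batch, keep_prob: 1.0})
```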
### Intro to TensorFlow

### Session
```python
import tensorflow as tf

# Create TensorFlow object called hello_constant
hello_constant = tf.constant('Hello World!')

with tf.Session() as sess:
    # Run the tf.constant operation in the session
    output = sess.run(hello_constant)
    print(output)  # b'Hello World!'
```
### Subtraction & Multiplication
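A minimal sketch of element-wise subtraction and multiplication with `tf.subtract` and `tf.multiply` (the operand values are illustrative assumptions):

```python
import tensorflow as tf

x = tf.subtract(10, 4)  # 6
y = tf.multiply(2, 5)   # 10

with tf.Session() as sess:
    print(sess.run(x))  # 6
    print(sess.run(y))  # 10
```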
### Converting Types
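TensorFlow ops require operands of matching dtypes; a minimal sketch of casting with `tf.cast` (the values are illustrative assumptions):

```python
import tensorflow as tf

# Subtracting an int tensor from a float tensor raises a type error,
# so cast the int tensor to float32 first
a = tf.constant(2.0)
b = tf.constant(1)
c = tf.subtract(a, tf.cast(b, tf.float32))

with tf.Session() as sess:
    print(sess.run(c))  # 1.0
```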