Skip to main content

Get weight and bias with get_weights method in TensorFlow, Keras, in Python

The 'tf.keras.layers.Layer' class has the method 'get_weights()' to get parameters (weight and bias).
For example, make a neural network for predicting handwritten digits as follows.
import tensorflow as tf
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(input_shape=(28,28,1)),
    tf.keras.layers.Dense(16, activation='softmax'),
    tf.keras.layers.Dense(16, activation='softmax'),
    tf.keras.layers.Dense(10, activation='softmax')
])

print(type(model))
print(type(model.layers[0]))
print(type(model.layers[1]))
print(type(model.layers[2]))
print(type(model.layers[3]))
# <class 'keras.engine.sequential.Sequential'>
# <class 'keras.layers.core.flatten.Flatten'>
# <class 'keras.layers.core.dense.Dense'>
# <class 'keras.layers.core.dense.Dense'>
# <class 'keras.layers.core.dense.Dense'>
You can check the summary of the model with the summary() method.
model.summary()
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 flatten_1 (Flatten)         (None, 784)               0         
                                                                 
 dense_3 (Dense)             (None, 16)                12560     
                                                                 
 dense_4 (Dense)             (None, 16)                272       
                                                                 
 dense_5 (Dense)             (None, 10)                170       
                                                                 
=================================================================
Total params: 13,002
Trainable params: 13,002
Non-trainable params: 0
_________________________________________________________________
When you need the weights and bias of each layer, use 'get_weights()', which returns a list of 'numpy.ndarray' objects containing the weights and the bias.
weights, bias = model.layers[1].get_weights()
print(weights)
[[-0.04334281 -0.07965886  0.04804216 ... -0.07723255  0.04162325
   0.03558747]
 [-0.0382161  -0.08222143  0.04041284 ...  0.04629825 -0.03842923
  -0.03821559]
 [ 0.05765057 -0.04607654  0.02304497 ... -0.00319601 -0.02462361
  -0.05735615]
 ...
 [-0.01985393  0.03288492 -0.01998654 ... -0.02868129  0.06519817
  -0.01714767]
 [ 0.0096809  -0.02735789  0.06469741 ...  0.00344118 -0.02688923
  -0.07211724]
 [-0.07438072  0.04900783  0.07029073 ...  0.06927463  0.02509315
  -0.00857842]]
The 'weights' attribute is also available, but its elements are ResourceVariable objects, a subclass of tf.Variable, rather than numpy.ndarray.
weights, bias = model.layers[1].weights
print(type(weights))
# <class 'tensorflow.python.ops.resource_variable_ops.ResourceVariable'>
print(weights)
<tf.Variable 'dense_3/kernel:0' shape=(784, 16) dtype=float32, numpy=
array([[-0.04334281, -0.07965886,  0.04804216, ..., -0.07723255,
         0.04162325,  0.03558747],
       [-0.0382161 , -0.08222143,  0.04041284, ...,  0.04629825,
        -0.03842923, -0.03821559],
       [ 0.05765057, -0.04607654,  0.02304497, ..., -0.00319601,
        -0.02462361, -0.05735615],
       ...,
       [-0.01985393,  0.03288492, -0.01998654, ..., -0.02868129,
         0.06519817, -0.01714767],
       [ 0.0096809 , -0.02735789,  0.06469741, ...,  0.00344118,
        -0.02688923, -0.07211724],
       [-0.07438072,  0.04900783,  0.07029073, ...,  0.06927463,
         0.02509315, -0.00857842]], dtype=float32)>
print(weights.shape)
print(weights.numpy())
(784, 16)
[[-0.04334281 -0.07965886  0.04804216 ... -0.07723255  0.04162325
   0.03558747]
 [-0.0382161  -0.08222143  0.04041284 ...  0.04629825 -0.03842923
  -0.03821559]
 [ 0.05765057 -0.04607654  0.02304497 ... -0.00319601 -0.02462361
  -0.05735615]
 ...
 [-0.01985393  0.03288492 -0.01998654 ... -0.02868129  0.06519817
  -0.01714767]
 [ 0.0096809  -0.02735789  0.06469741 ...  0.00344118 -0.02688923
  -0.07211724]
 [-0.07438072  0.04900783  0.07029073 ...  0.06927463  0.02509315
  -0.00857842]]
 

Comments