TensorFlow variable definition, initialization, and sharing

Variables can be defined directly with tf.Variable or with the get_variable method. If two directly defined variables share a name, TF automatically uniquifies the name; get_variable, however, raises an error on a duplicate name.

import tensorflow as tf

# Start from a clean graph so repeated runs do not accumulate variables.
tf.reset_default_graph()

# tf.Variable always creates a new variable; on a name collision TF
# silently uniquifies the name by appending a numeric suffix.
var1 = tf.Variable(1.0, name='firstvar')
print("var1:", var1.name)  # var1: firstvar:0

var1 = tf.Variable(2.0, name='firstvar')
print("var1:", var1.name)  # var1: firstvar_1:0

# With no explicit name, TF falls back to the default name "Variable".
var2 = tf.Variable(3.0)
print("var2:", var2.name)  # var2: Variable:0

var2 = tf.Variable(4.0)
# BUGFIX: label previously said "var1:" while printing var2's name.
print("var2:", var2.name)  # var2: Variable_1:0

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # The Python names point at the most recent assignments (2.0 and 4.0).
    print("var1=", var1.eval())  # var1= 2.0

    print("var2=", var2.eval())  # var2= 4.0

# tf.get_variable, by contrast, refuses to reuse a taken name
# (unless reuse is enabled in the enclosing variable_scope).
get_var1 = tf.get_variable("firstvar", [1],
                           initializer=tf.constant_initializer(0.3))
print("get_var1:", get_var1.name)  # get_var1: firstvar_2:0

# Repeating the same name with get_variable raises a ValueError:
# get_var1 = tf.get_variable("firstvar",[1], initializer=tf.constant_initializer(0.4))
# print ("get_var1:",get_var1.name)

get_var1 = tf.get_variable("firstvar1", [1],
                           initializer=tf.constant_initializer(0.4))
print("get_var1:", get_var1.name)  # get_var1: firstvar1:0

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print("get_var1=", get_var1.eval())  # get_var1= [0.4]

 

Adding a variable scope partitions variables into separate namespaces, so the same variable name can be reused in different scopes without conflict.

import tensorflow as tf

tf.reset_default_graph()

# Two get_variable calls with the same name in the default scope
# would raise a ValueError:
# var1 = tf.get_variable("firstvar",shape=[2],dtype=tf.float32)
# var2 = tf.get_variable("firstvar",shape=[2],dtype=tf.float32)

# Wrapping each call in its own variable_scope prefixes the variable
# name with the scope name, so identical names no longer collide.
with tf.variable_scope("test1"):
    var1 = tf.get_variable("firstvar", shape=[2], dtype=tf.float32)
with tf.variable_scope("test2"):
    var2 = tf.get_variable("firstvar", shape=[2], dtype=tf.float32)

print("var1:", var1.name)  # var1: test1/firstvar:0
print("var2:", var2.name)  # var2: test2/firstvar:0

 

Nesting of scopes and sharing of variables

Setting reuse=True means that if a variable with the given name already exists in the scope, the existing variable is returned; otherwise it is created. With reuse left at its default (False), the code raises an error, because a variable with the same name would be created twice in the same scope.

import tensorflow as tf

tf.reset_default_graph()

# Scopes nest: a scope opened inside another contributes a path-like
# prefix ("outer/inner/name") to variables created within it.
with tf.variable_scope("test1"):
    var1 = tf.get_variable("firstvar", shape=[2], dtype=tf.float32)
    with tf.variable_scope("test2"):
        var2 = tf.get_variable("firstvar", shape=[2], dtype=tf.float32)

print("var1:", var1.name)  # var1: test1/firstvar:0
print("var2:", var2.name)  # var2: test1/test2/firstvar:0

# Re-entering the same scopes with reuse=True fetches the variables
# created above instead of making new ones; the reuse flag also
# propagates into the nested scope.
with tf.variable_scope("test1", reuse=True):
    var3 = tf.get_variable("firstvar", shape=[2], dtype=tf.float32)
    with tf.variable_scope("test2"):
        var4 = tf.get_variable("firstvar", shape=[2], dtype=tf.float32)

print("var3:", var3.name)  # var3: test1/firstvar:0
print("var4:", var4.name)  # var4: test1/test2/firstvar:0

 

Initialization of variables

import tensorflow as tf

# Reset the graph first, consistent with the other examples; without it,
# running this after the previous script in one process would fail with
# "Variable test1/firstvar already exists".
tf.reset_default_graph()

# An initializer set on a variable_scope becomes the default initializer
# for every variable created inside it, including nested scopes.
with tf.variable_scope("test1", initializer=tf.constant_initializer(0.4)):
    var1 = tf.get_variable("firstvar", shape=[2], dtype=tf.float32)

    with tf.variable_scope("test2"):
        var2 = tf.get_variable("firstvar", shape=[2], dtype=tf.float32)
        # An explicit per-variable initializer overrides the scope default.
        var3 = tf.get_variable("var3", shape=[2],
                               initializer=tf.constant_initializer(0.3))

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # var1 and var2 inherit the scope's initializer value
    print("var1=", var1.eval()) # var1= [0.4 0.4]
    print("var2=", var2.eval()) # var2= [0.4 0.4]
    # var3 uses its own explicit initializer instead
    print("var3=", var3.eval()) # var3= [0.3 0.3]

Tags: Session

Posted on Sun, 09 Feb 2020 14:26:47 -0500 by infini