Reposted from
https://blog.csdn.net/MiniCnCoder/article/details/80007213
Difference and relationship between tf.get_variable() and tf.Variable():
The former adds a check on top of the latter: calling tf.get_variable() a second time with the same name in the same scope (without enabling reuse) raises an error, whereas tf.Variable() always creates a new variable. A minimal sketch of this follows the convolution example below.
import tensorflow as tf

# images: the input batch, e.g. a float32 tensor of shape [batch, height, width, 3]
with tf.variable_scope('conv1') as scope:
    weights = tf.get_variable('weights',
                              shape=[3, 3, 3, 16],
                              dtype=tf.float32,
                              initializer=tf.truncated_normal_initializer(stddev=0.1, dtype=tf.float32))
    biases = tf.get_variable('biases',
                             shape=[16],
                             dtype=tf.float32,
                             initializer=tf.constant_initializer(0.1))
    conv = tf.nn.conv2d(images, weights, strides=[1, 1, 1, 1], padding='SAME')
    pre_activation = tf.nn.bias_add(conv, biases)
    conv1 = tf.nn.relu(pre_activation, name=scope.name)
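To illustrate the point above, a minimal sketch (TensorFlow 1.x assumed; the scope and variable names 'demo', 'v' and 'w' are only for illustration): a repeated tf.get_variable() call fails unless the scope is marked as reusable, while tf.Variable() silently creates a second variable with a uniquified name.

with tf.variable_scope('demo'):
    v1 = tf.get_variable('v', shape=[1])        # creates demo/v
    # v_bad = tf.get_variable('v', shape=[1])   # ValueError: variable demo/v already exists

with tf.variable_scope('demo', reuse=True):
    v2 = tf.get_variable('v', shape=[1])        # returns the existing demo/v
print(v1 is v2)                                 # True

w1 = tf.Variable(0.0, name='w')                 # name: w:0
w2 = tf.Variable(0.0, name='w')                 # name: w_1:0, a brand-new variable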
tf.variable_scope() vs. tf.name_scope():
The latter only prefixes the names of operations (and of variables created with tf.Variable()); it does not affect the names of variables created with tf.get_variable(). Only tf.variable_scope() governs those, as the sketch after the training example below shows.
def trainning(loss, learning_rate):
    # The name scope only prefixes the names of the ops created below (e.g. optimizer/...).
    with tf.name_scope('optimizer'):
        optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
        global_step = tf.Variable(0, name='global_step', trainable=False)
        train_op = optimizer.minimize(loss, global_step=global_step)
    return train_op
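To make the difference concrete, a minimal sketch (TensorFlow 1.x assumed; the scope and tensor names 'ns', 'vs', 'a' through 'e' are only illustrative): inside tf.name_scope(), ops and tf.Variable() pick up the prefix but tf.get_variable() does not, while tf.variable_scope() prefixes both.

with tf.name_scope('ns'):
    a = tf.get_variable('a', shape=[1])   # a.name == 'a:0'    (no 'ns/' prefix)
    b = tf.Variable(0.0, name='b')        # b.name == 'ns/b:0'
    c = tf.add(a, b, name='c')            # c.name == 'ns/c:0'

with tf.variable_scope('vs'):
    d = tf.get_variable('d', shape=[1])   # d.name == 'vs/d:0'
    e = tf.add(d, 1.0, name='e')          # e.name == 'vs/e:0'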