python - ValueError: Variable d_w1/Adam/ does not exist, or was not created with tf.get_variable()

I've run into a rather baffling problem: either I'm very blind and missing something, or my reuse variable is somehow set to both False and True at the same time.

These are my model definitions:

def discriminator(data, reuse=False):
    if reuse:
        tf.get_variable_scope().reuse_variables()

    # Fully Connected 1
    d_w1 = tf.get_variable('d_w1', [41, 1024], initializer=tf.truncated_normal_initializer(stddev=0.02))
    d_b1 = tf.get_variable('d_b1', [1024], initializer=tf.constant_initializer(0))
    d1 = tf.nn.relu(tf.matmul(data, d_w1) + d_b1)

    # Fully Connected 2 Wide
    d_w2 = tf.get_variable('d_w2', [1024, 6144], initializer=tf.truncated_normal_initializer(stddev=0.02))
    d_b2 = tf.get_variable('d_b2', [6144], initializer=tf.constant_initializer(0))
    d2 = tf.nn.relu(tf.matmul(d1, d_w2) + d_b2)

    # Fully Connected 3 Choking
    d_w3 = tf.get_variable('d_w3', [6144, 1024], initializer=tf.truncated_normal_initializer(stddev=0.02))
    d_b3 = tf.get_variable('d_b3', [1024], initializer=tf.constant_initializer(0))
    d3 = tf.nn.relu(tf.matmul(d2, d_w3) + d_b3)

    d_w4 = tf.get_variable('d_w4', [1024, 1], initializer=tf.truncated_normal_initializer(stddev=0.02))
    d_b4 = tf.get_variable('d_b4', [1], initializer=tf.constant_initializer(0))

    output = tf.nn.sigmoid(tf.matmul(d3, d_w4) + d_b4)
    return output

def generator(z, batch_size, z_dim):
    # Input layer
    g_w1 = tf.get_variable('g_w1', [z_dim, 41], dtype=tf.float32,
                           initializer=tf.truncated_normal_initializer(stddev=0.02))
    g_b1 = tf.get_variable('g_b1', [41], initializer=tf.truncated_normal_initializer(stddev=0.02))
    g1 = tf.matmul(z, g_w1) + g_b1
    g1 = tf.reshape(g1, [-1, 41])
    g1 = tf.contrib.layers.batch_norm(g1, epsilon=1e-5, scope='bn1')
    g1 = tf.nn.relu(g1)

    g_w2 = tf.get_variable('g_w2', [41, 1024], dtype=tf.float32,
                           initializer=tf.truncated_normal_initializer(stddev=0.02))
    g_b2 = tf.get_variable('g_b2', [1024], initializer=tf.truncated_normal_initializer(stddev=0.02))
    g2 = tf.matmul(g1, g_w2) + g_b2
    g2 = tf.contrib.layers.batch_norm(g2, epsilon=1e-5, scope='bn2')
    g2 = tf.nn.relu(g2)

    g_w3 = tf.get_variable('g_w3', [1024, 5120], dtype=tf.float32,
                           initializer=tf.truncated_normal_initializer(stddev=0.02))
    g_b3 = tf.get_variable('g_b3', [5120], initializer=tf.truncated_normal_initializer(stddev=0.02))
    g3 = tf.matmul(g2, g_w3) + g_b3
    g3 = tf.contrib.layers.batch_norm(g3, epsilon=1e-5, scope='bn3')
    g3 = tf.nn.relu(g3)

    g_w4 = tf.get_variable('g_w4', [5120, 41], dtype=tf.float32,
                           initializer=tf.truncated_normal_initializer(stddev=0.02))
    g_b4 = tf.get_variable('g_b4', [41], initializer=tf.truncated_normal_initializer(stddev=0.02))
    g4 = tf.matmul(g3, g_w4) + g_b4
    g4 = tf.sigmoid(g4)

    return g4

And this is my optimizer/trainer definition:

batch_size = 50
tf.reset_default_graph()
sess = tf.Session()
z_placeholder = tf.placeholder(tf.float32, [None, z_dimensions], name='z_placeholder') # Hold my noise

x_placeholder = tf.placeholder(tf.float32, shape=[None, 41], name='x_placeholder') # Hold my data

Gz = generator(z_placeholder, batch_size, z_dimensions) # Hold my counterfeits

Dx = discriminator(x_placeholder) # Hold Predictions on the real data

Dg = discriminator(Gz, reuse=True) # Hold Predictions on the fake data

# Loss

d_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=Dx, labels=tf.ones_like(Dx)))
d_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=Dg, labels=tf.ones_like(Dg)))

g_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=Dg, labels=tf.ones_like(Dg)))

# Trainable Vars
# Separate the discriminator and generator variables by name prefix. Thanks Jon Bruner and Adit Deshpande.

tvars = tf.trainable_variables()

d_vars = [var for var in tvars if 'd_' in var.name]
g_vars = [var for var in tvars if 'g_' in var.name]

print([v.name for v in d_vars])
print([v.name for v in g_vars])

# Optimizers!
with tf.variable_scope(tf.get_variable_scope(), reuse=False):
    print("reuse or not: {}".format(tf.get_variable_scope().reuse))
    assert tf.get_variable_scope().reuse == False, "Problems!"
    d_trainer_fake = tf.train.AdamOptimizer(0.0003).minimize(d_loss_fake, var_list=d_vars)
    d_trainer_real = tf.train.AdamOptimizer(0.0003).minimize(d_loss_real, var_list=d_vars)

    g_trainer = tf.train.AdamOptimizer(0.0001).minimize(g_loss, var_list=g_vars)

After running my code, I get this wonderful error:

['d_w1:0', 'd_b1:0', 'd_w2:0', 'd_b2:0', 'd_w3:0', 'd_b3:0', 'd_w4:0', 'd_b4:0']
['g_w1:0', 'g_b1:0', 'g_w2:0', 'g_b2:0', 'g_w3:0', 'g_b3:0', 'g_w4:0', 'g_b4:0']
reuse or not: True
Traceback (most recent call last):
  File "C:/Users/FW/PycharmProjects/GAN IDS/GAN 2.py", line 151, in <module>
    assert tf.get_variable_scope().reuse == False, "Problems!"
AssertionError: Problems!

Without that assertion in place to catch it, it turns into this:

Traceback (most recent call last):
  File "C:/Users/FW/PycharmProjects/GAN IDS/GAN 2.py", line 152, in <module>
    d_trainer_fake = tf.train.AdamOptimizer(0.0003).minimize(d_loss_fake, var_list=d_vars)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\training\optimizer.py", line 325, in minimize
    name=name)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\training\optimizer.py", line 446, in apply_gradients
    self._create_slots([_get_variable_for(v) for v in var_list])
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\training\adam.py", line 128, in _create_slots
    self._zeros_slot(v, "m", self._name)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\training\optimizer.py", line 766, in _zeros_slot
    named_slots[_var_key(var)] = slot_creator.create_zeros_slot(var, op_name)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\training\slot_creator.py", line 174, in create_zeros_slot
    colocate_with_primary=colocate_with_primary)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\training\slot_creator.py", line 146, in create_slot_with_initializer
    dtype)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\training\slot_creator.py", line 66, in _create_slot_var
    validate_shape=validate_shape)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 1065, in get_variable
    use_resource=use_resource, custom_getter=custom_getter)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 962, in get_variable
    use_resource=use_resource, custom_getter=custom_getter)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 367, in get_variable
    validate_shape=validate_shape, use_resource=use_resource)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 352, in _true_getter
    use_resource=use_resource)
  File "C:\Users\FW\Anaconda3\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 682, in _get_single_variable
    "VarScope?" % name)
ValueError: Variable d_w1/Adam/ does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=None in VarScope?

I've looked through the most common threads where this problem appears, but none of their solutions have worked so far, so I have no idea what's wrong.

I assume something has gone seriously wrong in my model definitions or in the optimizer part of the code.

Best Answer

So, you are creating three AdamOptimizer instances. Each of them creates its own slot variables (for example d_w1/Adam) via tf.get_variable(), which fails once the current variable scope has reuse set to True. If you want to keep the optimizers separate, the best solution is to create each one in its own scope:

with tf.variable_scope('fake-optimizer'):
    d_trainer_fake = tf.train.AdamOptimizer(0.0003).minimize(d_loss_fake, var_list=d_vars)

with tf.variable_scope('real-optimizer'):
    d_trainer_real = tf.train.AdamOptimizer(0.0003).minimize(d_loss_real, var_list=d_vars)

with tf.variable_scope('optimizer'):
    g_trainer = tf.train.AdamOptimizer(0.0001).minimize(g_loss, var_list=g_vars)

If for some reason you want them to share their internal parameters, you should create the first optimizer with reuse=None and the next two with reuse=True. Or create all three with reuse=tf.AUTO_REUSE (supported since TensorFlow 1.4), which does the same thing.
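As a rough sketch of that last variant (assuming TensorFlow 1.4+; the scope name 'optimizers' is just a placeholder), all three optimizers would go into one scope opened with reuse=tf.AUTO_REUSE, so the Adam slot variables (d_w1/Adam and friends) are created on first use and reused afterwards:

# Sketch only: a single AUTO_REUSE scope instead of three separate scopes.
# Under AUTO_REUSE, get_variable() creates a variable if it does not exist
# yet and reuses it otherwise, so the two discriminator trainers end up
# sharing the slot variables for d_vars.
with tf.variable_scope('optimizers', reuse=tf.AUTO_REUSE):
    d_trainer_fake = tf.train.AdamOptimizer(0.0003).minimize(d_loss_fake, var_list=d_vars)
    d_trainer_real = tf.train.AdamOptimizer(0.0003).minimize(d_loss_real, var_list=d_vars)
    g_trainer = tf.train.AdamOptimizer(0.0001).minimize(g_loss, var_list=g_vars)

Whether to share is a design choice: shared slots mean both discriminator trainers read and update the same Adam moment estimates for d_vars, while the three-scope version gives each loss its own moments.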

Regarding "python - ValueError: Variable d_w1/Adam/ does not exist, or was not created with tf.get_variable()", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/47775771/
