Converting TensorFlow to PyTorch

Problem description

I am trying to convert the following code from tensorflow to pytorch:

import tensorflow as tf

# GLO-style setup: the decoder weights and the per-image latent codes are both
# trained on the MSE between the decoded images and the real ones.
decoder = GLOGenerator()
g_optimizer = tf.keras.optimizers.Adam(0.0001)   # updates the decoder weights
z_optimizer = tf.keras.optimizers.Adam(0.01)     # updates the latent codes

def train_step(images, z_train):
    with tf.GradientTape() as g_tape, tf.GradientTape() as z_tape:
        result_images = decoder(z_train)
        g_loss = tf.keras.losses.mean_squared_error(images, tf.squeeze(result_images))
    gradients_z = z_tape.gradient(g_loss, z_train)
    gradients_g = g_tape.gradient(g_loss, decoder.trainable_variables)

    z_optimizer.apply_gradients(zip([gradients_z], [z_train]))
    g_optimizer.apply_gradients(zip(gradients_g, decoder.trainable_variables))
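For comparison, a rough PyTorch counterpart of this step might look like the sketch below, assuming GLOGenerator has been rewritten as a torch.nn.Module and that both optimizers are torch.optim.Adam instances; the name train_step_torch and its argument list are illustrative, not part of the original code:

import torch
import torch.nn.functional as F

def train_step_torch(images, z_train, decoder, g_optimizer, z_optimizer):
    # decoder: assumed torch.nn.Module port of GLOGenerator (hypothetical).
    # z_train: a tensor that requires grad, either a leaf tensor or a slice of a
    # learnable code matrix, so the latent codes receive gradients as well.
    g_optimizer.zero_grad()
    z_optimizer.zero_grad()
    result_images = decoder(z_train)
    # F.mse_loss averages over every element, while the tf.keras loss above
    # averages over the last axis only; here that only rescales the gradients.
    loss = F.mse_loss(result_images.squeeze(), images)
    # One backward pass fills .grad for the decoder weights and the codes,
    # replacing the two GradientTape objects.
    loss.backward()
    z_optimizer.step()
    g_optimizer.step()
    return loss.item()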

If it helps, the calling code is here (in case you need to see what the arguments to the train_step function are):

import numpy as np
import tensorflow as tf

BATCH_SIZE = 32
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
images = x_train
number_of_images = images.shape[0]
# One 10-dimensional latent code per training image.
z_vectors = np.random.normal(0, 1, (number_of_images, 10))

def train(number_of_epoches):
    image_indexs = np.arange(images.shape[0])
    number_of_steps = images.shape[0] // BATCH_SIZE
    for epoch in range(number_of_epoches):
        print("epoch: ", epoch)
        np.random.shuffle(image_indexs)
        for index in range(number_of_steps):
            batch_ids = image_indexs[BATCH_SIZE * index: BATCH_SIZE * (index + 1)]
            # The batch's codes are wrapped in a tf.Variable so the tape can
            # differentiate with respect to them.
            z_for_train = tf.Variable(z_vectors[batch_ids], dtype='float32')
            train_step(images[batch_ids], z_for_train)
            # Write the updated codes back and renormalize them.
            z_train = z_for_train.numpy()
            z_train /= np.max(np.linalg.norm(z_vectors, axis=0, keepdims=True), 1)
            z_vectors[batch_ids] = z_train
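A literal PyTorch translation of this loop could then reuse train_step_torch from above. In the sketch below, GLOGeneratorTorch is a hypothetical torch.nn.Module port of GLOGenerator, and images, z_vectors and BATCH_SIZE are the numpy arrays and constant defined in the snippet above:

import numpy as np
import torch

decoder = GLOGeneratorTorch()                    # hypothetical nn.Module port of GLOGenerator
g_optimizer = torch.optim.Adam(decoder.parameters(), lr=0.0001)

def train_torch(number_of_epoches):
    image_indexs = np.arange(images.shape[0])
    number_of_steps = images.shape[0] // BATCH_SIZE
    for epoch in range(number_of_epoches):
        print("epoch: ", epoch)
        np.random.shuffle(image_indexs)
        for index in range(number_of_steps):
            batch_ids = image_indexs[BATCH_SIZE * index: BATCH_SIZE * (index + 1)]
            # tf.Variable(...) plus Adam(0.01) becomes a leaf tensor with
            # requires_grad=True and an Adam instance over that tensor.
            z_for_train = torch.tensor(z_vectors[batch_ids], dtype=torch.float32,
                                       requires_grad=True)
            z_optimizer = torch.optim.Adam([z_for_train], lr=0.01)
            image_batch = torch.tensor(images[batch_ids], dtype=torch.float32)
            train_step_torch(image_batch, z_for_train, decoder, g_optimizer, z_optimizer)
            # Write the updated codes back and renormalize, as in the loop above.
            z_train = z_for_train.detach().numpy().copy()
            z_train /= np.max(np.linalg.norm(z_vectors, axis=0, keepdims=True), 1)
            z_vectors[batch_ids] = z_train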

Tags: tensorflow, keras, neural-network, pytorch, tensorflow2.0

Solution
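One possible variant (an assumption of this sketch, not something stated in the question) is to keep every latent code in a single learnable matrix and index it per batch, so that one Adam instance owns all the codes instead of a new one being created every step:

import torch

# All codes live in one learnable matrix; advanced indexing is differentiable,
# so the rows selected for the batch receive gradients during backward().
z_vectors = torch.nn.Parameter(torch.randn(number_of_images, 10))
z_optimizer = torch.optim.Adam([z_vectors], lr=0.01)

# Inside the batch loop (illustrative):
#   z_batch = z_vectors[torch.as_tensor(batch_ids, dtype=torch.long)]
#   train_step_torch(image_batch, z_batch, decoder, g_optimizer, z_optimizer)

Note that with a single Adam over the whole matrix, the accumulated momentum can also move codes that are not in the current batch, which is a behavioral difference from the per-batch optimizer used in the TensorFlow code.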

