• GANs (Generative Adversarial Networks)
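A toy GAN in TensorFlow 1.x: the generator turns N_IDEAS random numbers into a 15-point "painting", and the discriminator tries to tell those paintings apart from the works of a "famous artist", which are quadratic curves a*x^2 + (a-1) with a drawn uniformly from [1, 2), i.e. curves lying between the lower bound x^2 and the upper bound 2*x^2 + 1 plotted below. The losses built in the code are the original minimax GAN losses,

    L_D = -E[ log D(x) + log(1 - D(G(z))) ]
    L_G =  E[ log(1 - D(G(z))) ]

where x is a real painting and G(z) is a generated one.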


    #!/usr/bin/python2.7
    #coding:utf-8
    import tensorflow as tf
    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.pyplot import savefig
    import os
    os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
    # Hyper Parameters
    BATCH_SIZE = 64
    LR_G = 0.0001 # learning rate for generator
    LR_D = 0.0001 # learning rate for discriminator
    N_IDEAS = 5 # think of this as the number of ideas G uses to generate an art work
    ART_COMPONENTS = 15 # total number of points G can draw on the canvas (15 points painted from 5 ideas)
    PAINT_POINTS = np.vstack([np.linspace(-1, 1, ART_COMPONENTS) for _ in range(BATCH_SIZE)]) # stacked vertically, shape (64, 15)
    # show our beautiful painting range
    plt.plot(PAINT_POINTS[0], 2 * np.power(PAINT_POINTS[0], 2) + 1, c='#74BCFF', lw=3, label='upper bound')
    plt.plot(PAINT_POINTS[0], 1 * np.power(PAINT_POINTS[0], 2) + 0, c='#FF9359', lw=3, label='lower bound')
    plt.legend(loc='upper right')
    # savefig('./GAN_range.jpg')
    plt.show()
    def artist_works():
        # paintings from the famous artist (the real target)
        # a is a random coefficient for a quadratic curve, drawn uniformly from [1, 2)
        a = np.random.uniform(1, 2, size=BATCH_SIZE)[:, np.newaxis]
        paintings = a * np.power(PAINT_POINTS, 2) + (a - 1)  # shape (BATCH_SIZE, ART_COMPONENTS)
        return paintings
    with tf.variable_scope('Generator'):
        G_in = tf.placeholder(tf.float32, [None, N_IDEAS])  # random ideas (could be drawn from a normal distribution)
        G_l1 = tf.layers.dense(G_in, 128, tf.nn.relu)
        G_out = tf.layers.dense(G_l1, ART_COMPONENTS)  # making a painting from these random ideas
    with tf.variable_scope('Discriminator'):
        real_art = tf.placeholder(tf.float32, [None, ART_COMPONENTS], name='real_in')  # art work from the famous artist
        D_l0 = tf.layers.dense(real_art, 128, tf.nn.relu, name='l')
        prob_artist0 = tf.layers.dense(D_l0, 1, tf.nn.sigmoid, name='out')  # probability that the art work is made by the artist
        # reuse the same layers to judge the paintings from a newbie like G
        D_l1 = tf.layers.dense(G_out, 128, tf.nn.relu, name='l', reuse=True)
        prob_artist1 = tf.layers.dense(D_l1, 1, tf.nn.sigmoid, name='out', reuse=True)  # probability that G's art work is made by the artist
    D_loss = -tf.reduce_mean(tf.log(prob_artist0) + tf.log(1 - prob_artist1))  # D maximizes this log-likelihood, so we minimize its negative
    G_loss = tf.reduce_mean(tf.log(1 - prob_artist1))  # G tries to make D assign high probability to its paintings
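    # At the optimum the discriminator outputs 0.5 for both real and generated
    # paintings, so D_loss = 2 * log(2) ≈ 1.38; the plot below reports -D_loss,
    # which is where the "-1.38 for G to converge" readout comes from.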
    train_D = tf.train.AdamOptimizer(LR_D).minimize(
        D_loss, var_list=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='Discriminator'))
    train_G = tf.train.AdamOptimizer(LR_G).minimize(
        G_loss, var_list=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='Generator'))
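    # var_list restricts each optimizer to its own variable scope, so a D update
    # never touches G's weights and vice versa.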
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    plt.ion()  # turn on interactive mode for continuous plotting
    for step in range(5000):
        artist_paintings = artist_works()  # real paintings from the artist
        G_ideas = np.random.randn(BATCH_SIZE, N_IDEAS)  # random ideas for G to paint from
        # train both networks and fetch the results needed for plotting
        G_paintings, pa0, Dl = sess.run([G_out, prob_artist0, D_loss, train_D, train_G],
                                        {G_in: G_ideas, real_art: artist_paintings})[:3]
        if step % 50 == 0:
            # plotting
            plt.cla()
            plt.plot(PAINT_POINTS[0], G_paintings[0], c='#4AD631', lw=3, label='Generated painting')
            plt.plot(PAINT_POINTS[0], 2 * np.power(PAINT_POINTS[0], 2) + 1, c='#74BCFF', lw=3, label='upper bound')
            plt.plot(PAINT_POINTS[0], 1 * np.power(PAINT_POINTS[0], 2) + 0, c='#FF9359', lw=3, label='lower bound')
            plt.text(-.5, 2.3, 'D accuracy=%.2f (0.5 for D to converge)' % pa0.mean(), fontdict={'size': 15})
            plt.text(-.5, 2, 'D score= %.2f (-1.38 for G to converge)' % -Dl, fontdict={'size': 15})
            plt.ylim((0, 3)); plt.legend(loc='upper right', fontsize=12); plt.draw()
            plt.pause(0.01)
    plt.ioff()
    # savefig('./GAN.jpg')
    plt.show()
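After training, the generator alone can produce new paintings from fresh random ideas. A minimal sketch, appended to the end of the script above and reusing the sess, G_in, G_out, N_IDEAS, and PAINT_POINTS it defines:

    # sample a few new paintings from the trained generator
    new_ideas = np.random.randn(5, N_IDEAS)             # 5 fresh idea vectors
    new_paintings = sess.run(G_out, {G_in: new_ideas})  # shape (5, ART_COMPONENTS)
    for p in new_paintings:
        plt.plot(PAINT_POINTS[0], p, lw=2)
    plt.plot(PAINT_POINTS[0], 2 * np.power(PAINT_POINTS[0], 2) + 1, c='#74BCFF', lw=1)  # upper bound
    plt.plot(PAINT_POINTS[0], 1 * np.power(PAINT_POINTS[0], 2) + 0, c='#FF9359', lw=1)  # lower bound
    plt.ylim((0, 3))
    plt.show()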
    
  • Original article: https://www.cnblogs.com/narjaja/p/9513477.html