• tensorflow1


    1. What is TensorFlow?

    TensorFlow is an open-source software library that performs numerical computation using data flow graphs. It was originally developed by the Google Brain team for machine learning and deep neural network research, but it is equally applicable to a wide range of other domains.
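
    A quick way to see the "build the graph first, run it later" idea behind data flow graphs (a minimal sketch, assuming TensorFlow 1.x is installed): defining an op only adds a node to the default graph and returns a Tensor handle; no value is computed until a Session runs it.

    import tensorflow as tf

    a = tf.constant(3.0)
    b = tf.constant(4.0)
    c = a * b  # adds a multiplication node to the default graph

    print(c)  # prints a Tensor handle, e.g. Tensor("mul:0", shape=(), dtype=float32) -- no value yet

    with tf.Session() as sess:
        print(sess.run(c))  # 12.0 -- the graph is actually executed here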

     2. Accessing the TensorFlow website: append the following lines to the end of the Windows hosts file (located at C:\Windows\System32\drivers\etc\hosts):

    #TensorFlow start

    64.233.188.121 www.tensorflow.org

    #TensorFlow end

    Open cmd and run the following command: ipconfig /flushdns. Then visit www.tensorflow.org and the home page should open.

    3. Example 1

    import tensorflow as tf  # load the TensorFlow library
    m1 = tf.constant([[1, 2]])    # op (node): 1x2 constant matrix
    m2 = tf.constant([[3], [4]])  # op (node): 2x1 constant matrix
    m1xm2 = tf.matmul(m1, m2)     # op (node): matrix product

    # method 1: create and close a Session explicitly
    sess = tf.Session()
    result = sess.run(m1xm2)
    print(result)
    sess.close()

    # method 2: execute the graph inside a context called a Session (here, the default graph),
    # using the Session as a context manager
    with tf.Session() as sess:
        result = sess.run(m1xm2)
        print(result)
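
    Both methods print the same value: sess.run returns an ordinary NumPy array, here the 1x2 by 2x1 product [[1*3 + 2*4]] = [[11]]. A small check (a sketch, assuming the example above has just been run and result is still in scope):

    print(result)        # [[11]]
    print(type(result))  # <class 'numpy.ndarray'>
    print(result.shape)  # (1, 1)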

    4. Example 2

    import tensorflow as tf  # load the TensorFlow library
    x1 = tf.placeholder(dtype=tf.float32)  # placeholder; its value is supplied at run time via feed_dict
    y1 = tf.placeholder(dtype=tf.float32)  # placeholder; its value is supplied at run time via feed_dict
    z1 = x1 + y1

    x2 = tf.placeholder(dtype=tf.float32, shape=[2, 3])
    y2 = tf.placeholder(dtype=tf.float32, shape=[3, 2])
    z2 = tf.matmul(x2, y2)

    with tf.Session() as sess:
        z1_out = sess.run(z1, feed_dict={x1: 2, y1: 4})  # feed x1 = 2, y1 = 4 via feed_dict
        print(z1_out)

        z2_out = sess.run(z2, feed_dict={x2: [[1, 2, 3], [4, 5, 6]], y2: [[1, 2], [3, 4], [5, 6]]})
        print(z2_out)
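
    With the feeds above, z1_out is 6.0 and z2_out is the 2x2 product [[22. 28.] [49. 64.]]. A placeholder has no value of its own, so evaluating z2 without a feed_dict fails; a minimal sketch of that failure mode (assuming the graph from Example 2 is still defined):

    with tf.Session() as sess:
        try:
            sess.run(z2)  # no feed_dict supplied for x2 / y2
        except tf.errors.InvalidArgumentError as err:
            print('z2 needs both x2 and y2 to be fed:', type(err).__name__)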

    5. Example 3

    import tensorflow as tf  # load the TensorFlow library

    v1 = tf.Variable(1)
    addTest = tf.add(v1, 2)
    update_addTest = tf.assign(v1, addTest)  # assign the value of addTest to v1

    with tf.Session() as sess:
        # variables must be initialized before use
        sess.run(tf.global_variables_initializer())

        for i in range(5):
            sess.run(update_addTest)
            print(sess.run(v1))
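
    The loop prints 3, 5, 7, 9, 11: each sess.run(update_addTest) writes v1 + 2 back into v1. Running addTest alone computes the sum but does not modify the variable; a small sketch of the difference (same graph as above, run in a fresh session):

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        print(sess.run(addTest))         # 3 -- just computes v1 + 2
        print(sess.run(v1))              # 1 -- v1 itself is unchanged
        print(sess.run(update_addTest))  # 3 -- the assign op also returns the new value
        print(sess.run(v1))              # 3 -- now v1 has been updated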

    6. Example 4

    import tensorflow as tf
    import numpy as np
    import matplotlib.pyplot as plt

    # fake data
    x = np.linspace(-10, 10, 100)  # 100 evenly spaced points in [-10, 10]

    # following are popular activation functions
    y_relu = tf.nn.relu(x) #relu
    y_sigmoid = tf.nn.sigmoid(x) #sigmoid
    y_tanh = tf.nn.tanh(x) #tanh
    y_softplus = tf.nn.softplus(x) #softplus
    # y_softmax = tf.nn.softmax(x) softmax is a special kind of activation function, it is about probability


    with tf.Session() as sess:
        y_relu, y_sigmoid, y_tanh, y_softplus = sess.run([y_relu, y_sigmoid, y_tanh, y_softplus])

    # plt to visualize these activation function
    plt.figure(1, figsize=(8, 6))  # figure 1, 8 inches wide by 6 inches tall
    plt.subplot(221)
    plt.plot(x, y_relu, c='red', label='relu')  # red line; relu in the top-left subplot
    plt.ylim((-2, 5))
    plt.legend(loc='best')

    plt.subplot(222)
    plt.plot(x, y_sigmoid, c='red', label='sigmoid')
    plt.ylim((-0.5, 1))
    plt.legend(loc='best')

    plt.subplot(223)
    plt.plot(x, y_tanh, c='red', label='tanh')
    plt.ylim((-1.2, 1.2))
    plt.legend(loc='best')

    plt.subplot(224)
    plt.plot(x, y_softplus, c='red', label='softplus')
    plt.ylim((-0.2, 6))
    plt.legend(loc='best')

    plt.show()
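
    The four activations have simple closed forms: relu(x) = max(0, x), sigmoid(x) = 1 / (1 + e^-x), tanh(x), and softplus(x) = ln(1 + e^x). A quick NumPy cross-check of what the tf.nn ops compute (a sketch, not part of the original example):

    import numpy as np

    x = np.linspace(-10, 10, 100)
    np_relu     = np.maximum(0.0, x)        # relu(x) = max(0, x)
    np_sigmoid  = 1.0 / (1.0 + np.exp(-x))  # sigmoid(x) = 1 / (1 + e^-x)
    np_tanh     = np.tanh(x)                # tanh(x)
    np_softplus = np.log1p(np.exp(x))       # softplus(x) = log(1 + e^x); this naive form can overflow for large x

    # these should agree with the corresponding tf.nn results above to floating-point precision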

     7. Example 5

    import tensorflow as tf
    import numpy as np
    import matplotlib.pyplot as plt

    # data
    x = np.linspace(-1, 1, 100)[:, np.newaxis]      # column vector: 100 rows, 1 column
    noise = np.random.normal(0, 0.1, size=x.shape)  # Gaussian noise, mean 0, standard deviation 0.1
    y = np.power(x, 2) + noise                      # x squared plus noise

    plt.scatter(x, y)  # scatter plot of the training data
    plt.show()

    tf_x = tf.placeholder(tf.float32, x.shape)
    tf_y = tf.placeholder(tf.float32, y.shape)

    hid = tf.layers.dense(tf_x, 10, tf.nn.relu)  # hidden layer: 10 units, ReLU activation
    output = tf.layers.dense(hid, 1)             # output layer: 1 linear unit

    loss = tf.losses.mean_squared_error(tf_y, output)  # mean squared error between labels and predictions
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_op = optimizer.minimize(loss)

    sess = tf.Session()
    sess.run(tf.global_variables_initializer())

    plt.ion()  # interactive mode on, so the figure can be updated inside the training loop

    for step in range(500):
        _, l, predict = sess.run([train_op, loss, output], {tf_x: x, tf_y: y})
        if step % 10 == 0:
            plt.cla()
            plt.scatter(x, y)
            plt.plot(x, predict, 'r-', lw=5)
            plt.text(0.5, 0, 'loss=%0.4f' % l, fontdict={'size': 20, 'color': 'red'})
            plt.pause(0.1)

    plt.ioff()
    plt.show()
    sess.close()
    print('end...')
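
    tf.layers.dense hides the trainable parameters: each call creates a weight matrix and a bias vector behind the scenes and applies activation(x @ W + b). A rough hand-rolled sketch of the two layers above (not the author's code; the names W1, b1, W2, b2 are hypothetical, and the initializers differ from tf.layers.dense's defaults):

    # hidden layer: 1 input feature -> 10 ReLU units
    W1 = tf.Variable(tf.random_normal([1, 10]))
    b1 = tf.Variable(tf.zeros([10]))
    hid_manual = tf.nn.relu(tf.matmul(tf_x, W1) + b1)

    # output layer: 10 hidden units -> 1 linear output
    W2 = tf.Variable(tf.random_normal([10, 1]))
    b2 = tf.Variable(tf.zeros([1]))
    output_manual = tf.matmul(hid_manual, W2) + b2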

    8. Example 6

  • Original post: https://www.cnblogs.com/crazybird123/p/7337068.html