tf.__version__ == '2.2.0'  (TensorFlow version used throughout these notes)
端午节假期在宿舍学keras,实验室跑了一条蛇,结果都没人管,该怎么说呢,环境真的是差到极致。今年的yq,也不能回北京实习,被迫在zh已经呆了一个半月了,吃饭喝水都特别不方便,真的是环境很糟糕,月底学术报告完打算回家,现在也找不到实习,暑假一个人呆在这里,不说每天吃饭点外卖吃到肚子难受,就一个人在这种环境也难受,还是要回家学习。这次回家要安排好计划,每天充实的度过。对于sbrz我好像没那么生气,毕竟我从来没在乎过lj。
# Notebook cell: single-feature linear regression (Income vs. Education)
# using one Dense unit, i.e. y = ax + b.
import tensorflow as tf
import pandas as pd
import matplotlib.pyplot as plt
# %matplotlib inline  # Jupyter magic -- only valid inside a notebook

print('Tensorflow Version:{}'.format(tf.__version__))

# NOTE(review): the blog export stripped the backslashes from this Windows
# path; reconstructed as a raw string -- confirm against the actual file layout.
data = pd.read_csv(r'D:\DeepLearning\Tensorflow\my_study\Income1.csv')

plt.scatter(data.Education, data.Income)

x = data.Education
y = data.Income

model = tf.keras.Sequential()
# Output dim 1, input dim 1 (input_shape is given as a tuple): a Dense
# layer computes ax + b.
model.add(tf.keras.layers.Dense(1, input_shape=(1,)))
model.summary()

model.compile(optimizer='adam', loss='mse')  # mse: mean squared error
history = model.fit(x, y, epochs=1000)

# Predict income for 20 years of education.
model.predict(pd.Series([20]))
线性回归
# Notebook cell: multi-feature linear regression on the Advertising data set
# (TV / radio / newspaper spend -> sales).
import tensorflow as tf
# BUG FIX: original read `import pandas as ps` but every call below uses `pd`,
# which would raise NameError.
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# %matplotlib inline  # Jupyter magic -- only valid inside a notebook

# NOTE(review): backslashes restored in the Windows path (stripped by the
# blog export) -- confirm against the actual file layout.
data = pd.read_csv(r'D:\DeepLearning\Tensorflow\my_study\Advertising.csv')
data.head()

plt.scatter(data.TV, data.sales)
plt.scatter(data.radio, data.sales)
plt.scatter(data.newspaper, data.sales)

# iloc is positional: all rows; columns from the second up to (excluding)
# the last for the features, the last column as the target.
x = data.iloc[:, 1:-1]
y = data.iloc[:, -1]

model = tf.keras.Sequential([
    tf.keras.layers.Dense(10, input_shape=(3,), activation='relu'),
    tf.keras.layers.Dense(1),
])
model.summary()

model.compile(optimizer='adam', loss='mse')
model.fit(x, y, epochs=100)
二分类逻辑回归
# Notebook cell: binary logistic regression on the credit-a data set.
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# %matplotlib inline  # Jupyter magic -- only valid inside a notebook

# The CSV has no header row, hence header=None.
# NOTE(review): backslashes restored in the Windows path (stripped by the
# blog export) -- confirm against the actual file layout.
data = pd.read_csv(r'D:\DeepLearning\Tensorflow\my_study\credit-a.csv', header=None)
data.head()
data.iloc[:, -1].value_counts()

x = data.iloc[:, :-1]
# Map the -1 labels in the last column to 0: {0, 1} labels suit logistic
# regression, while {-1, 1} suits SVM-style models.
y = data.iloc[:, -1].replace(-1, 0)

model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(4, input_shape=(15,), activation='relu'))
model.add(tf.keras.layers.Dense(4, activation='relu'))
# Sigmoid output for binary classification.
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
model.summary()

model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])
history = model.fit(x, y, epochs=100)

history.history.keys()
plt.plot(history.epoch, history.history.get('loss'))
plt.plot(history.epoch, history.history.get('acc'))
多分类逻辑回归
# Notebook cell: multi-class classification on Fashion-MNIST, first with
# integer labels, then with one-hot labels and a deeper dropout network.
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# %matplotlib inline  # Jupyter magic -- only valid inside a notebook

(train_images, train_labels), (test_images, test_labels) = \
    tf.keras.datasets.fashion_mnist.load_data()
train_images.shape, train_labels.shape
plt.imshow(train_images[0])  # show the first image

# Normalize pixel values to [0, 1].
train_images = train_images / 255
test_images = test_images / 255

model = tf.keras.Sequential()
model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))  # 28*28 -> flat vector
model.add(tf.keras.layers.Dense(128, activation='relu'))
model.add(tf.keras.layers.Dense(10, activation='softmax'))
# Integer labels -> sparse_categorical_crossentropy.
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['acc'])
model.fit(train_images, train_labels, epochs=5)
model.evaluate(test_images, test_labels)

# Switching to one-hot labels: the loss must become categorical_crossentropy.
train_label_onehot = tf.keras.utils.to_categorical(train_labels)
test_label_onehot = tf.keras.utils.to_categorical(test_labels)

# BUG FIX: the original created `mdoel` (typo) and added two layers to it,
# then kept calling `model.add(...)` -- stacking new layers onto the
# already-trained model above. Build the deeper dropout model in one place.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
model.add(tf.keras.layers.Dense(128, activation='relu'))
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Dense(128, activation='relu'))
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Dense(128, activation='relu'))
model.add(tf.keras.layers.Dense(10, activation='softmax'))
# `lr=` is deprecated in tf.keras; the supported kwarg is `learning_rate=`.
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
              loss='categorical_crossentropy', metrics=['acc'])
history = model.fit(train_images, train_label_onehot, epochs=5,
                    validation_data=(test_images, test_label_onehot))
history.history.keys()

plt.plot(history.epoch, history.history.get('loss'), label='loss')
plt.plot(history.epoch, history.history.get('val_loss'), label='val_loss')
plt.legend()

plt.plot(history.epoch, history.history.get('acc'), label='acc')
plt.plot(history.epoch, history.history.get('val_acc'), label='val_acc')
plt.legend()

predict = model.predict(test_images)
predict.shape
np.argmax(predict[0])  # index of the largest probability in the first row
函数式API
# Notebook cell: the same Fashion-MNIST classifier built with the Keras
# functional API (explicit input tensor, layers called as functions).
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# %matplotlib inline  # Jupyter magic -- only valid inside a notebook
from tensorflow import keras

(train_images, train_labels), (test_images, test_labels) = \
    tf.keras.datasets.fashion_mnist.load_data()
train_images = train_images / 255
test_images = test_images / 255

# Renamed from `input` to avoid shadowing the Python builtin.
inputs = keras.Input(shape=(28, 28))
x = keras.layers.Flatten()(inputs)
x = keras.layers.Dense(32, activation='relu')(x)
x = keras.layers.Dropout(0.5)(x)
x = keras.layers.Dense(64, activation='relu')(x)
output = keras.layers.Dense(10, activation='softmax')(x)

model = keras.Model(inputs=inputs, outputs=output)
model.summary()
tf.data模块
# Notebook cell: tf.data basics (slicing, shuffle/repeat/batch, map), then
# feeding a tf.data pipeline into model.fit on MNIST.
import tensorflow as tf

# from_tensor_slices turns a Python sequence into a dataset of tensors.
dataset = tf.data.Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6, 7])
for ele in dataset:
    print(ele.numpy())  # convert the tensor element back to numpy

for ele in dataset.take(4):  # take the first 4 elements
    print(ele)

dataset = tf.data.Dataset.from_tensor_slices([[1, 2], [3, 4], [5, 6]])
for ele in dataset:
    print(ele.numpy())

dataset = tf.data.Dataset.from_tensor_slices(
    {'a': [1, 2, 3, 4], 'b': [6, 7, 8, 9], 'c': [2, 3, 4, 5]})
for ele in dataset:
    print(ele)

# A Dataset is iterable but not an iterator; wrap it with iter() first.
next(iter(dataset.take(1)))

# Dataset transformations:
dataset = dataset.shuffle(buffer_size=7)  # buffer_size: how many elements to shuffle over
# repeat(count=3) repeats the (shuffled) dataset 3 times; with no count it
# repeats indefinitely.
dataset = dataset.repeat(count=3)
# NOTE(review): assigned to `data` and never used in the original -- kept as a
# demo of batching, emitting 3 elements at a time.
data = dataset.batch(3)

dataset = tf.data.Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6, 7])
for ele in dataset:
    print(ele.numpy())

dataset = tf.data.Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6, 7])
# map applies a function to every element.
dataset = dataset.map(tf.square)
for ele in dataset:
    print(ele.numpy())

(train_images, train_labels), (test_images, test_labels) = \
    tf.keras.datasets.mnist.load_data()
train_images = train_images / 255
test_images = test_images / 255

ds_train_img = tf.data.Dataset.from_tensor_slices(train_images)
ds_train_lab = tf.data.Dataset.from_tensor_slices(train_labels)
ds_train = tf.data.Dataset.zip((ds_train_img, ds_train_lab))  # zip() takes a tuple
ds_train = ds_train.shuffle(10000).repeat().batch(64)  # 64 samples per step

model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax'),
])
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

# With an endlessly repeating dataset, fit needs steps_per_epoch (// is
# integer division).
steps_per_epoch = train_images.shape[0] // 64
model.fit(ds_train, epochs=5, steps_per_epoch=steps_per_epoch)

ds_test = tf.data.Dataset.from_tensor_slices((test_images, test_labels))
ds_test = ds_test.batch(64)
validation_steps = 10000 // 64
model.fit(ds_train, epochs=5, steps_per_epoch=steps_per_epoch,
          validation_data=ds_test, validation_steps=validation_steps)
卷积神经网路
# Notebook cell: VGG-style convolutional network on Fashion-MNIST with
# global average pooling instead of Flatten before the classifier head.
import tensorflow as tf
print(tf.__version__)
tf.test.is_gpu_available()  # check whether a GPU is usable (deprecated in later TF versions)
from tensorflow import keras
import matplotlib.pyplot as plt
# %matplotlib inline  # Jupyter magic -- only valid inside a notebook
import numpy as np

(train_images, train_labels), (test_images, test_labels) = \
    keras.datasets.fashion_mnist.load_data()
train_images.shape

# Conv2D expects 4-D input (batch, h, w, channels); add a channel axis.
train_images = np.expand_dims(train_images, -1)
train_images.shape
test_images = np.expand_dims(test_images, -1)

model = tf.keras.Sequential()
model.add(tf.keras.layers.Conv2D(64, (3, 3), input_shape=train_images.shape[1:],
                                 activation='relu', padding='same'))
model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(tf.keras.layers.MaxPool2D())  # default pool size 2x2
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Conv2D(128, (3, 3), activation='relu', padding='same'))
model.add(tf.keras.layers.Conv2D(128, (3, 3), activation='relu', padding='same'))
model.add(tf.keras.layers.MaxPool2D())  # default pool size 2x2
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Conv2D(256, (3, 3), activation='relu', padding='same'))
model.add(tf.keras.layers.Conv2D(256, (3, 3), activation='relu', padding='same'))
model.add(tf.keras.layers.MaxPool2D())  # default pool size 2x2
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Conv2D(512, (3, 3), activation='relu', padding='same'))
model.add(tf.keras.layers.Conv2D(512, (3, 3), activation='relu', padding='same'))
model.add(tf.keras.layers.Dropout(0.5))
# Global average pooling: (None, h, w, c) -> (None, c), averaging each
# feature map down to a single value.
model.add(tf.keras.layers.GlobalAveragePooling2D())
# BUG FIX: the hidden Dense layer used activation='softmax', which squashes
# hidden activations into a probability simplex and cripples training;
# a hidden layer should use 'relu' (softmax belongs on the output only).
model.add(tf.keras.layers.Dense(256, activation='relu'))
model.add(tf.keras.layers.Dense(10, activation='softmax'))
model.summary()

model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['acc'])
history = model.fit(train_images, train_labels, epochs=30,
                    validation_data=(test_images, test_labels))
history.history.keys()

plt.plot(history.epoch, history.history.get('acc'), label='acc')
plt.plot(history.epoch, history.history.get('val_acc'), label='val_acc')
plt.plot(history.epoch, history.history.get('loss'), label='loss')
plt.plot(history.epoch, history.history.get('val_loss'), label='val_loss')
plt.legend()  # labels were set but never shown in the original