From b172e3ae0c6ecf6e43b9b888384523b8462252af Mon Sep 17 00:00:00 2001 From: wbd0502 <1135711622@qq.com> Date: Fri, 28 Oct 2022 07:08:04 +0000 Subject: [PATCH 1/2] =?UTF-8?q?=E6=96=B0=E5=BB=BA=20=E7=AC=AC=E5=8D=81?= =?UTF-8?q?=E7=BB=84-=E5=9F=BA=E4=BA=8E=E6=AE=8B=E5=B7=AEU-Net=E7=A5=9E?= =?UTF-8?q?=E7=BB=8F=E7=BD=91=E7=BB=9C=E7=9A=84=E6=B5=B7=E4=B8=8A=E6=B6=8C?= =?UTF-8?q?=E6=B5=AA=E5=99=AA=E9=9F=B3=E5=8E=8B=E5=88=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../.keep" | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 "code/2022_autumn/\347\254\254\345\215\201\347\273\204-\345\237\272\344\272\216\346\256\213\345\267\256U-Net\347\245\236\347\273\217\347\275\221\347\273\234\347\232\204\346\265\267\344\270\212\346\266\214\346\265\252\345\231\252\351\237\263\345\216\213\345\210\266/.keep" diff --git "a/code/2022_autumn/\347\254\254\345\215\201\347\273\204-\345\237\272\344\272\216\346\256\213\345\267\256U-Net\347\245\236\347\273\217\347\275\221\347\273\234\347\232\204\346\265\267\344\270\212\346\266\214\346\265\252\345\231\252\351\237\263\345\216\213\345\210\266/.keep" "b/code/2022_autumn/\347\254\254\345\215\201\347\273\204-\345\237\272\344\272\216\346\256\213\345\267\256U-Net\347\245\236\347\273\217\347\275\221\347\273\234\347\232\204\346\265\267\344\270\212\346\266\214\346\265\252\345\231\252\351\237\263\345\216\213\345\210\266/.keep" new file mode 100644 index 0000000..e69de29 -- Gitee From 20f838027b632115f052ad84ab887e96bb226121 Mon Sep 17 00:00:00 2001 From: wbd0502 <1135711622@qq.com> Date: Fri, 28 Oct 2022 07:08:17 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=E4=B8=BB=E7=A8=8B=E5=BA=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: wbd0502 <1135711622@qq.com> --- .../test_01.py" | 232 ++++++++++++++++++ 1 file changed, 232 insertions(+) create mode 100644 
# -*- coding: utf-8 -*-
"""Sea swell-noise suppression with a residual U-Net.

Loads 2000x256 single-channel samples stored as whitespace-separated
numbers in plain-text files, trains a residual U-Net to map noisy
origins to clean labels, and saves predictions for the test set.
"""
from keras import Input
from keras.callbacks import ModelCheckpoint
from keras.layers import BatchNormalization, Dropout, Concatenate
from keras.layers import Conv2D, Conv2DTranspose, UpSampling2D
from keras.layers import Activation
import keras
import keras.optimizers
from keras.models import Model
import numpy as np
import os
import matplotlib.pyplot as plt

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'    # silence TF info/warning spam
os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"

path1 = "C:/Users/OMG/pyprojects/data_train_origin"
path2 = "C:/Users/OMG/pyprojects/data_train_label"
path3 = "C:/Users/OMG/pyprojects/data_test_origin"


def _load_dataset(path, num_files, sample_shape=(2000, 256, 1)):
    """Read every numeric text file under *path* into one float32 array.

    Files are sorted numerically by their stem (so "10.txt" follows
    "9.txt").  Each file contains space-separated numbers; tokens that do
    not parse as a float are skipped — same tolerant behaviour as the
    original per-path loops, but via float() instead of eval(), which
    must never be applied to file contents.

    Returns an array of shape ``(num_files,) + sample_shape``.
    """
    data = np.zeros((num_files,) + sample_shape, np.float32)
    names = os.listdir(path)
    names.sort(key=lambda name: int(name[:-4]))  # strip the 4-char extension
    for idx, name in enumerate(names):
        with open(os.path.join(path, name), "r", encoding="utf-8") as fh:
            tokens = fh.read().split(' ')
        values = []
        for tok in tokens:
            try:
                values.append(float(tok))
            except ValueError:
                continue  # blank / non-numeric separator token
        data[idx] = np.asarray(values, np.float32).reshape(sample_shape)
    return data


# Datasets are loaded eagerly at import time, as in the original script.
data_train_origin = _load_dataset(path1, 90)
data_train_label = _load_dataset(path2, 90)
data_test_origin = _load_dataset(path3, 10)
print(data_train_origin.shape)

opt = keras.optimizers.Adam()


class myUnet(object):
    """Residual U-Net: four strided-conv encoder stages, four upsampling
    decoder stages, each with an additive shortcut branch."""

    def get_unet(self):
        """Build and compile the residual U-Net for (2000, 256, 1) inputs.

        Returns the compiled ``Model`` (MSE loss, Adam optimizer).
        NOTE(review): 'accuracy' is kept for parity with the original but
        is not a meaningful metric for this regression task.
        """
        inputs = Input((2000, 256, 1))
        conv = Conv2D(1, 3, padding="same", strides=1)(inputs)

        # --- encoder stage 0 (residual, stride-2 downsample) ---
        conv0 = Conv2D(1, 3, padding="same", strides=2)(conv)
        batch0 = BatchNormalization(momentum=0.9)(conv0)
        act0 = Activation("relu")(batch0)
        conv1 = Conv2D(1, 3, padding="same", strides=1)(act0)
        batch1 = BatchNormalization(momentum=0.9)(conv1)
        conv2 = Conv2D(4, 3, padding="same", strides=2)(conv)  # shortcut branch
        batch2 = BatchNormalization(momentum=0.9)(conv2)
        # NOTE(review): Add() merges 4-channel and 1-channel tensors —
        # relies on Keras merge-layer broadcasting; confirm intended.
        residual1 = keras.layers.Add()([batch2, batch1])
        act1 = Activation("relu")(residual1)

        # --- encoder stage 1 ---
        conv3 = Conv2D(8, 3, padding="same", strides=2)(act1)
        batch3 = BatchNormalization(momentum=0.9)(conv3)
        act2 = Activation("relu")(batch3)
        conv4 = Conv2D(8, 3, padding="same", strides=1)(act2)
        batch4 = BatchNormalization(momentum=0.9)(conv4)
        conv5 = Conv2D(8, 3, padding="same", strides=2)(act1)  # shortcut
        batch5 = BatchNormalization(momentum=0.9)(conv5)
        residual2 = keras.layers.Add()([batch5, batch4])
        act3 = Activation("relu")(residual2)

        # --- encoder stage 2 ---
        conv6 = Conv2D(16, 3, padding="same", strides=2)(act3)
        batch6 = BatchNormalization(momentum=0.9)(conv6)
        act4 = Activation("relu")(batch6)
        conv7 = Conv2D(16, 3, padding="same", strides=1)(act4)
        batch7 = BatchNormalization(momentum=0.9)(conv7)
        conv8 = Conv2D(16, 3, padding="same", strides=2)(act3)  # shortcut
        batch8 = BatchNormalization(momentum=0.9)(conv8)
        residual3 = keras.layers.Add()([batch8, batch7])
        act5 = Activation("relu")(residual3)

        # --- encoder stage 3 (bottleneck) ---
        conv9 = Conv2D(32, 3, padding="same", strides=2)(act5)
        batch9 = BatchNormalization(momentum=0.9)(conv9)
        act6 = Activation("relu")(batch9)
        conv10 = Conv2D(32, 3, padding="same", strides=1)(act6)
        batch10 = BatchNormalization(momentum=0.9)(conv10)
        conv11 = Conv2D(32, 3, padding="same", strides=2)(act5)  # shortcut
        batch11 = BatchNormalization(momentum=0.9)(conv11)
        residual4 = keras.layers.Add()([batch11, batch10])
        act7 = Activation("relu")(residual4)

        # --- decoder stage 0: upsample + skip-concat with act5 ---
        conv12 = Conv2D(16, 3, padding="same")(UpSampling2D(size=(2, 2))(act7))
        merge1 = Concatenate(axis=3)([conv12, act5])
        batch12 = BatchNormalization(momentum=0.9)(merge1)
        act8 = Activation("relu")(batch12)
        conv13 = Conv2D(32, 3, padding="same", strides=1)(act8)
        batch13 = BatchNormalization(momentum=0.9)(conv13)
        residual5 = keras.layers.add([batch13, merge1])
        act9 = Activation("relu")(residual5)

        # --- decoder stage 1: skip-concat with act3 ---
        conv14 = Conv2D(8, 3, padding="same")(UpSampling2D(size=(2, 2))(act9))
        merge2 = Concatenate(axis=3)([conv14, act3])
        batch14 = BatchNormalization(momentum=0.9)(merge2)
        act10 = Activation("relu")(batch14)
        conv15 = Conv2D(16, 3, padding="same", strides=1)(act10)
        batch15 = BatchNormalization(momentum=0.9)(conv15)
        residual6 = keras.layers.add([batch15, merge2])
        act11 = Activation("relu")(residual6)

        # --- decoder stage 2: skip-concat with act1 ---
        conv16 = Conv2D(4, 3, padding="same")(UpSampling2D(size=(2, 2))(act11))
        merge3 = Concatenate(axis=3)([conv16, act1])
        batch16 = BatchNormalization(momentum=0.9)(merge3)
        act12 = Activation("relu")(batch16)
        conv17 = Conv2D(8, 3, padding="same", strides=1)(act12)
        batch17 = BatchNormalization(momentum=0.9)(conv17)
        residual7 = keras.layers.add([batch17, merge3])
        act13 = Activation("relu")(residual7)

        # --- decoder stage 3: back to full resolution, linear output ---
        conv18 = Conv2D(1, 3, padding="same")(UpSampling2D(size=(2, 2))(act13))
        merge4 = Concatenate(axis=3)([conv18, conv])
        batch18 = BatchNormalization(momentum=0.9)(merge4)
        act14 = Activation("relu")(batch18)
        conv19 = Conv2D(2, 3, padding="same", strides=1)(act14)
        batch19 = BatchNormalization(momentum=0.9)(conv19)
        residual8 = keras.layers.add([batch19, merge4])
        conv20 = Conv2D(1, 3, padding="same", strides=1)(residual8)  # linear output (no sigmoid)

        model = Model(inputs=inputs, outputs=conv20)
        model.compile(optimizer=opt, loss='mean_squared_error', metrics=['accuracy'])
        return model

    def train(self):
        """Train on the module-level origin/label arrays, checkpointing the
        best model (by training loss) and plotting the loss curve."""
        imgs_train = data_train_origin
        imgs_mask_train = data_train_label
        print(imgs_train.shape)
        print(imgs_mask_train.shape)
        model = self.get_unet()
        model_checkpoint = ModelCheckpoint(r'C:\Users\OMG\Desktop\unet2.hdf5',
                                           monitor='loss', verbose=1,
                                           save_best_only=True)
        history = model.fit(x=imgs_train, y=imgs_mask_train,
                            validation_split=0, batch_size=2, epochs=150,
                            verbose=1, shuffle=True,
                            callbacks=[model_checkpoint])
        print_history(history)

    def test(self):
        """Predict on the test set with the checkpointed weights and save
        the result as a .npy file."""
        print("loading data")
        imgs_test = data_test_origin
        print("loading data done")
        model = self.get_unet()
        print("got unet")
        model.load_weights(r'C:\Users\OMG\Desktop\unet2.hdf5')
        print('predict test data')
        imgs_mask_test = model.predict(imgs_test, batch_size=10, verbose=1)
        print(imgs_mask_test.shape)
        np.save(r'C:\Users\OMG\Desktop\imgs_mask_test2.npy', imgs_mask_test)


def print_history(history):
    """Plot the training loss curve recorded by ``Model.fit``."""
    plt.plot(history.history['loss'])
    plt.title('Model loss')
    plt.xlabel('Epoch')
    plt.legend(['Train_loss'])
    plt.show()


if __name__ == '__main__':
    unet = myUnet()
    # Original built an extra model via unet.get_unet() and discarded it;
    # train() builds its own, so that call is dropped.
    unet.train()
    unet.test()