【715】D-LinkNet实现
代码:
from keras.layers import Conv2D, MaxPooling2D, Input, Conv2DTranspose, \
Add, Activation, \
BatchNormalization
from keras import Model
import keras
INPUT_SHAPE = (512, 512, 15)
def residual_block(input_tensor, num_filters):
x = Conv2D(num_filters, (3, 3), padding='same')(input_tensor)
x = Conv2D(num_filters, (3, 3), padding='same')(x)
x = BatchNormalization()(x)
input_tensor = Conv2D(num_filters, (1, 1), padding='same')(input_tensor)
input_tensor = BatchNormalization()(input_tensor)
res_tensor = Add()([input_tensor, x])
res_tensor = Activation('relu')(res_tensor)
return res_tensor
def dilated_center_block(input_tensor, num_filters):
dilation_1 = Conv2D(num_filters, kernel_size=(3, 3), dilation_rate=(1, 1), padding='same')(input_tensor)
dilation_1 = Activation('relu')(dilation_1)
dilation_2 = Conv2D(num_filters, kernel_size=(3, 3), dilation_rate=(2, 2), padding='same')(dilation_1)
dilation_2 = Activation('relu')(dilation_2)
dilation_4 = Conv2D(num_filters, kernel_size=(3, 3), dilation_rate=(4, 4), padding='same')(dilation_2)
dilation_4 = Activation('relu')(dilation_4)
dilation_8 = Conv2D(num_filters, kernel_size=(3, 3), dilation_rate=(8, 8), padding='same')(dilation_4)
dilation_8 = Activation('relu')(dilation_8)
final_diliation = Add()([input_tensor, dilation_1, dilation_2, dilation_4, dilation_8])
return final_diliation
def decoder_block(input_tensor, num_filters):
decoder_tensor = Conv2D(num_filters, (1, 1), padding='same')(input_tensor)
decoder_tensor = BatchNormalization()(decoder_tensor)
decoder_tensor = Activation('relu')(decoder_tensor)
decoder_tensor = Conv2DTranspose(num_filters, kernel_size=(3, 3), strides=(2, 2), padding='same')(decoder_tensor)
decoder_tensor = BatchNormalization()(decoder_tensor)
decoder_tensor = Activation('relu')(decoder_tensor)
decoder_tensor = Conv2D(num_filters, (1, 1), padding='same')(decoder_tensor)
decoder_tensor = BatchNormalization()(decoder_tensor)
decoder_tensor = Activation('relu')(decoder_tensor)
return decoder_tensor
def encoder_block(input_tensor, num_filters, num_res_blocks):
encoded = residual_block(input_tensor, num_filters)
while num_res_blocks > 1:
encoded = residual_block(encoded, num_filters)
num_res_blocks -= 1
encoded_pool = MaxPooling2D((2, 2), strides=(2, 2))(encoded)
return encoded, encoded_pool
def create_dlinknet():
inputs = Input(shape=INPUT_SHAPE)
inputs_ = Conv2D(64, kernel_size=(3, 3), padding='same')(inputs)
inputs_ = BatchNormalization()(inputs_)
inputs_ = Activation('relu')(inputs_)
max_pool_inputs = MaxPooling2D((2, 2), strides=(2, 2))(inputs_)
encoded_1, encoded_pool_1 = encoder_block(max_pool_inputs, num_filters=64, num_res_blocks=3)
encoded_2, encoded_pool_2 = encoder_block(encoded_pool_1, num_filters=128, num_res_blocks=4)
encoded_3, encoded_pool_3 = encoder_block(encoded_pool_2, num_filters=256, num_res_blocks=6)
encoded_4, encoded_pool_4 = encoder_block(encoded_pool_3, num_filters=512, num_res_blocks=3)
center = dilated_center_block(encoded_4, 512)
decoded_1 = Add()([decoder_block(center, 256), encoded_3])
decoded_2 = Add()([decoder_block(decoded_1, 128), encoded_2])
decoded_3 = Add()([decoder_block(decoded_2, 64), encoded_1])
decoded_4 = decoder_block(decoded_3, 64)
final = Conv2DTranspose(32, kernel_size=(3, 3), padding='same')(decoded_4)
outputs = Conv2D(1, (1, 1), activation='sigmoid')(final)
model_i = Model(inputs=[inputs], outputs=[outputs])
#model_i.compile(optimizer='adam', loss=combined_loss, metrics=[dice_coeff])
#model_i.summary()
# model_i.load_weights(save_model_path)
return model_i
model = create_dlinknet()
model.summary()
输出:
Model: "model_1"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_2 (InputLayer) [(None, 512, 512, 15 0
__________________________________________________________________________________________________
conv2d_62 (Conv2D) (None, 512, 512, 64) 8704 input_2[0][0]
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, 512, 512, 64) 256 conv2d_62[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, 512, 512, 64) 0 batch_normalization_45[0][0]
__________________________________________________________________________________________________
max_pooling2d_5 (MaxPooling2D) (None, 256, 256, 64) 0 activation_33[0][0]
__________________________________________________________________________________________________
conv2d_63 (Conv2D) (None, 256, 256, 64) 36928 max_pooling2d_5[0][0]
__________________________________________________________________________________________________
conv2d_65 (Conv2D) (None, 256, 256, 64) 4160 max_pooling2d_5[0][0]
__________________________________________________________________________________________________
conv2d_64 (Conv2D) (None, 256, 256, 64) 36928 conv2d_63[0][0]
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, 256, 256, 64) 256 conv2d_65[0][0]
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, 256, 256, 64) 256 conv2d_64[0][0]
__________________________________________________________________________________________________
add_20 (Add) (None, 256, 256, 64) 0 batch_normalization_47[0][0]
batch_normalization_46[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, 256, 256, 64) 0 add_20[0][0]
__________________________________________________________________________________________________
conv2d_66 (Conv2D) (None, 256, 256, 64) 36928 activation_34[0][0]
__________________________________________________________________________________________________
conv2d_68 (Conv2D) (None, 256, 256, 64) 4160 activation_34[0][0]
__________________________________________________________________________________________________
conv2d_67 (Conv2D) (None, 256, 256, 64) 36928 conv2d_66[0][0]
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, 256, 256, 64) 256 conv2d_68[0][0]
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, 256, 256, 64) 256 conv2d_67[0][0]
__________________________________________________________________________________________________
add_21 (Add) (None, 256, 256, 64) 0 batch_normalization_49[0][0]
batch_normalization_48[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, 256, 256, 64) 0 add_21[0][0]
__________________________________________________________________________________________________
conv2d_69 (Conv2D) (None, 256, 256, 64) 36928 activation_35[0][0]
__________________________________________________________________________________________________
conv2d_71 (Conv2D) (None, 256, 256, 64) 4160 activation_35[0][0]
__________________________________________________________________________________________________
conv2d_70 (Conv2D) (None, 256, 256, 64) 36928 conv2d_69[0][0]
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, 256, 256, 64) 256 conv2d_71[0][0]
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, 256, 256, 64) 256 conv2d_70[0][0]
__________________________________________________________________________________________________
add_22 (Add) (None, 256, 256, 64) 0 batch_normalization_51[0][0]
batch_normalization_50[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, 256, 256, 64) 0 add_22[0][0]
__________________________________________________________________________________________________
max_pooling2d_6 (MaxPooling2D) (None, 128, 128, 64) 0 activation_36[0][0]
__________________________________________________________________________________________________
conv2d_72 (Conv2D) (None, 128, 128, 128 73856 max_pooling2d_6[0][0]
__________________________________________________________________________________________________
conv2d_74 (Conv2D) (None, 128, 128, 128 8320 max_pooling2d_6[0][0]
__________________________________________________________________________________________________
conv2d_73 (Conv2D) (None, 128, 128, 128 147584 conv2d_72[0][0]
__________________________________________________________________________________________________
batch_normalization_53 (BatchNo (None, 128, 128, 128 512 conv2d_74[0][0]
__________________________________________________________________________________________________
batch_normalization_52 (BatchNo (None, 128, 128, 128 512 conv2d_73[0][0]
__________________________________________________________________________________________________
add_23 (Add) (None, 128, 128, 128 0 batch_normalization_53[0][0]
batch_normalization_52[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, 128, 128, 128 0 add_23[0][0]
__________________________________________________________________________________________________
conv2d_75 (Conv2D) (None, 128, 128, 128 147584 activation_37[0][0]
__________________________________________________________________________________________________
conv2d_77 (Conv2D) (None, 128, 128, 128 16512 activation_37[0][0]
__________________________________________________________________________________________________
conv2d_76 (Conv2D) (None, 128, 128, 128 147584 conv2d_75[0][0]
__________________________________________________________________________________________________
batch_normalization_55 (BatchNo (None, 128, 128, 128 512 conv2d_77[0][0]
__________________________________________________________________________________________________
batch_normalization_54 (BatchNo (None, 128, 128, 128 512 conv2d_76[0][0]
__________________________________________________________________________________________________
add_24 (Add) (None, 128, 128, 128 0 batch_normalization_55[0][0]
batch_normalization_54[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, 128, 128, 128 0 add_24[0][0]
__________________________________________________________________________________________________
conv2d_78 (Conv2D) (None, 128, 128, 128 147584 activation_38[0][0]
__________________________________________________________________________________________________
conv2d_80 (Conv2D) (None, 128, 128, 128 16512 activation_38[0][0]
__________________________________________________________________________________________________
conv2d_79 (Conv2D) (None, 128, 128, 128 147584 conv2d_78[0][0]
__________________________________________________________________________________________________
batch_normalization_57 (BatchNo (None, 128, 128, 128 512 conv2d_80[0][0]
__________________________________________________________________________________________________
batch_normalization_56 (BatchNo (None, 128, 128, 128 512 conv2d_79[0][0]
__________________________________________________________________________________________________
add_25 (Add) (None, 128, 128, 128 0 batch_normalization_57[0][0]
batch_normalization_56[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, 128, 128, 128 0 add_25[0][0]
__________________________________________________________________________________________________
conv2d_81 (Conv2D) (None, 128, 128, 128 147584 activation_39[0][0]
__________________________________________________________________________________________________
conv2d_83 (Conv2D) (None, 128, 128, 128 16512 activation_39[0][0]
__________________________________________________________________________________________________
conv2d_82 (Conv2D) (None, 128, 128, 128 147584 conv2d_81[0][0]
__________________________________________________________________________________________________
batch_normalization_59 (BatchNo (None, 128, 128, 128 512 conv2d_83[0][0]
__________________________________________________________________________________________________
batch_normalization_58 (BatchNo (None, 128, 128, 128 512 conv2d_82[0][0]
__________________________________________________________________________________________________
add_26 (Add) (None, 128, 128, 128 0 batch_normalization_59[0][0]
batch_normalization_58[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, 128, 128, 128 0 add_26[0][0]
__________________________________________________________________________________________________
max_pooling2d_7 (MaxPooling2D) (None, 64, 64, 128) 0 activation_40[0][0]
__________________________________________________________________________________________________
conv2d_84 (Conv2D) (None, 64, 64, 256) 295168 max_pooling2d_7[0][0]
__________________________________________________________________________________________________
conv2d_86 (Conv2D) (None, 64, 64, 256) 33024 max_pooling2d_7[0][0]
__________________________________________________________________________________________________
conv2d_85 (Conv2D) (None, 64, 64, 256) 590080 conv2d_84[0][0]
__________________________________________________________________________________________________
batch_normalization_61 (BatchNo (None, 64, 64, 256) 1024 conv2d_86[0][0]
__________________________________________________________________________________________________
batch_normalization_60 (BatchNo (None, 64, 64, 256) 1024 conv2d_85[0][0]
__________________________________________________________________________________________________
add_27 (Add) (None, 64, 64, 256) 0 batch_normalization_61[0][0]
batch_normalization_60[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, 64, 64, 256) 0 add_27[0][0]
__________________________________________________________________________________________________
conv2d_87 (Conv2D) (None, 64, 64, 256) 590080 activation_41[0][0]
__________________________________________________________________________________________________
conv2d_89 (Conv2D) (None, 64, 64, 256) 65792 activation_41[0][0]
__________________________________________________________________________________________________
conv2d_88 (Conv2D) (None, 64, 64, 256) 590080 conv2d_87[0][0]
__________________________________________________________________________________________________
batch_normalization_63 (BatchNo (None, 64, 64, 256) 1024 conv2d_89[0][0]
__________________________________________________________________________________________________
batch_normalization_62 (BatchNo (None, 64, 64, 256) 1024 conv2d_88[0][0]
__________________________________________________________________________________________________
add_28 (Add) (None, 64, 64, 256) 0 batch_normalization_63[0][0]
batch_normalization_62[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, 64, 64, 256) 0 add_28[0][0]
__________________________________________________________________________________________________
conv2d_90 (Conv2D) (None, 64, 64, 256) 590080 activation_42[0][0]
__________________________________________________________________________________________________
conv2d_92 (Conv2D) (None, 64, 64, 256) 65792 activation_42[0][0]
__________________________________________________________________________________________________
conv2d_91 (Conv2D) (None, 64, 64, 256) 590080 conv2d_90[0][0]
__________________________________________________________________________________________________
batch_normalization_65 (BatchNo (None, 64, 64, 256) 1024 conv2d_92[0][0]
__________________________________________________________________________________________________
batch_normalization_64 (BatchNo (None, 64, 64, 256) 1024 conv2d_91[0][0]
__________________________________________________________________________________________________
add_29 (Add) (None, 64, 64, 256) 0 batch_normalization_65[0][0]
batch_normalization_64[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, 64, 64, 256) 0 add_29[0][0]
__________________________________________________________________________________________________
conv2d_93 (Conv2D) (None, 64, 64, 256) 590080 activation_43[0][0]
__________________________________________________________________________________________________
conv2d_95 (Conv2D) (None, 64, 64, 256) 65792 activation_43[0][0]
__________________________________________________________________________________________________
conv2d_94 (Conv2D) (None, 64, 64, 256) 590080 conv2d_93[0][0]
__________________________________________________________________________________________________
batch_normalization_67 (BatchNo (None, 64, 64, 256) 1024 conv2d_95[0][0]
__________________________________________________________________________________________________
batch_normalization_66 (BatchNo (None, 64, 64, 256) 1024 conv2d_94[0][0]
__________________________________________________________________________________________________
add_30 (Add) (None, 64, 64, 256) 0 batch_normalization_67[0][0]
batch_normalization_66[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, 64, 64, 256) 0 add_30[0][0]
__________________________________________________________________________________________________
conv2d_96 (Conv2D) (None, 64, 64, 256) 590080 activation_44[0][0]
__________________________________________________________________________________________________
conv2d_98 (Conv2D) (None, 64, 64, 256) 65792 activation_44[0][0]
__________________________________________________________________________________________________
conv2d_97 (Conv2D) (None, 64, 64, 256) 590080 conv2d_96[0][0]
__________________________________________________________________________________________________
batch_normalization_69 (BatchNo (None, 64, 64, 256) 1024 conv2d_98[0][0]
__________________________________________________________________________________________________
batch_normalization_68 (BatchNo (None, 64, 64, 256) 1024 conv2d_97[0][0]
__________________________________________________________________________________________________
add_31 (Add) (None, 64, 64, 256) 0 batch_normalization_69[0][0]
batch_normalization_68[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, 64, 64, 256) 0 add_31[0][0]
__________________________________________________________________________________________________
conv2d_99 (Conv2D) (None, 64, 64, 256) 590080 activation_45[0][0]
__________________________________________________________________________________________________
conv2d_101 (Conv2D) (None, 64, 64, 256) 65792 activation_45[0][0]
__________________________________________________________________________________________________
conv2d_100 (Conv2D) (None, 64, 64, 256) 590080 conv2d_99[0][0]
__________________________________________________________________________________________________
batch_normalization_71 (BatchNo (None, 64, 64, 256) 1024 conv2d_101[0][0]
__________________________________________________________________________________________________
batch_normalization_70 (BatchNo (None, 64, 64, 256) 1024 conv2d_100[0][0]
__________________________________________________________________________________________________
add_32 (Add) (None, 64, 64, 256) 0 batch_normalization_71[0][0]
batch_normalization_70[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, 64, 64, 256) 0 add_32[0][0]
__________________________________________________________________________________________________
max_pooling2d_8 (MaxPooling2D) (None, 32, 32, 256) 0 activation_46[0][0]
__________________________________________________________________________________________________
conv2d_102 (Conv2D) (None, 32, 32, 512) 1180160 max_pooling2d_8[0][0]
__________________________________________________________________________________________________
conv2d_104 (Conv2D) (None, 32, 32, 512) 131584 max_pooling2d_8[0][0]
__________________________________________________________________________________________________
conv2d_103 (Conv2D) (None, 32, 32, 512) 2359808 conv2d_102[0][0]
__________________________________________________________________________________________________
batch_normalization_73 (BatchNo (None, 32, 32, 512) 2048 conv2d_104[0][0]
__________________________________________________________________________________________________
batch_normalization_72 (BatchNo (None, 32, 32, 512) 2048 conv2d_103[0][0]
__________________________________________________________________________________________________
add_33 (Add) (None, 32, 32, 512) 0 batch_normalization_73[0][0]
batch_normalization_72[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, 32, 32, 512) 0 add_33[0][0]
__________________________________________________________________________________________________
conv2d_105 (Conv2D) (None, 32, 32, 512) 2359808 activation_47[0][0]
__________________________________________________________________________________________________
conv2d_107 (Conv2D) (None, 32, 32, 512) 262656 activation_47[0][0]
__________________________________________________________________________________________________
conv2d_106 (Conv2D) (None, 32, 32, 512) 2359808 conv2d_105[0][0]
__________________________________________________________________________________________________
batch_normalization_75 (BatchNo (None, 32, 32, 512) 2048 conv2d_107[0][0]
__________________________________________________________________________________________________
batch_normalization_74 (BatchNo (None, 32, 32, 512) 2048 conv2d_106[0][0]
__________________________________________________________________________________________________
add_34 (Add) (None, 32, 32, 512) 0 batch_normalization_75[0][0]
batch_normalization_74[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, 32, 32, 512) 0 add_34[0][0]
__________________________________________________________________________________________________
conv2d_108 (Conv2D) (None, 32, 32, 512) 2359808 activation_48[0][0]
__________________________________________________________________________________________________
conv2d_110 (Conv2D) (None, 32, 32, 512) 262656 activation_48[0][0]
__________________________________________________________________________________________________
conv2d_109 (Conv2D) (None, 32, 32, 512) 2359808 conv2d_108[0][0]
__________________________________________________________________________________________________
batch_normalization_77 (BatchNo (None, 32, 32, 512) 2048 conv2d_110[0][0]
__________________________________________________________________________________________________
batch_normalization_76 (BatchNo (None, 32, 32, 512) 2048 conv2d_109[0][0]
__________________________________________________________________________________________________
add_35 (Add) (None, 32, 32, 512) 0 batch_normalization_77[0][0]
batch_normalization_76[0][0]
__________________________________________________________________________________________________
activation_49 (Activation) (None, 32, 32, 512) 0 add_35[0][0]
__________________________________________________________________________________________________
conv2d_111 (Conv2D) (None, 32, 32, 512) 2359808 activation_49[0][0]
__________________________________________________________________________________________________
activation_50 (Activation) (None, 32, 32, 512) 0 conv2d_111[0][0]
__________________________________________________________________________________________________
conv2d_112 (Conv2D) (None, 32, 32, 512) 2359808 activation_50[0][0]
__________________________________________________________________________________________________
activation_51 (Activation) (None, 32, 32, 512) 0 conv2d_112[0][0]
__________________________________________________________________________________________________
conv2d_113 (Conv2D) (None, 32, 32, 512) 2359808 activation_51[0][0]
__________________________________________________________________________________________________
activation_52 (Activation) (None, 32, 32, 512) 0 conv2d_113[0][0]
__________________________________________________________________________________________________
conv2d_114 (Conv2D) (None, 32, 32, 512) 2359808 activation_52[0][0]
__________________________________________________________________________________________________
activation_53 (Activation) (None, 32, 32, 512) 0 conv2d_114[0][0]
__________________________________________________________________________________________________
add_36 (Add) (None, 32, 32, 512) 0 activation_49[0][0]
activation_50[0][0]
activation_51[0][0]
activation_52[0][0]
activation_53[0][0]
__________________________________________________________________________________________________
conv2d_115 (Conv2D) (None, 32, 32, 256) 131328 add_36[0][0]
__________________________________________________________________________________________________
batch_normalization_78 (BatchNo (None, 32, 32, 256) 1024 conv2d_115[0][0]
__________________________________________________________________________________________________
activation_54 (Activation) (None, 32, 32, 256) 0 batch_normalization_78[0][0]
__________________________________________________________________________________________________
conv2d_transpose_5 (Conv2DTrans (None, 64, 64, 256) 590080 activation_54[0][0]
__________________________________________________________________________________________________
batch_normalization_79 (BatchNo (None, 64, 64, 256) 1024 conv2d_transpose_5[0][0]
__________________________________________________________________________________________________
activation_55 (Activation) (None, 64, 64, 256) 0 batch_normalization_79[0][0]
__________________________________________________________________________________________________
conv2d_116 (Conv2D) (None, 64, 64, 256) 65792 activation_55[0][0]
__________________________________________________________________________________________________
batch_normalization_80 (BatchNo (None, 64, 64, 256) 1024 conv2d_116[0][0]
__________________________________________________________________________________________________
activation_56 (Activation) (None, 64, 64, 256) 0 batch_normalization_80[0][0]
__________________________________________________________________________________________________
add_37 (Add) (None, 64, 64, 256) 0 activation_56[0][0]
activation_46[0][0]
__________________________________________________________________________________________________
conv2d_117 (Conv2D) (None, 64, 64, 128) 32896 add_37[0][0]
__________________________________________________________________________________________________
batch_normalization_81 (BatchNo (None, 64, 64, 128) 512 conv2d_117[0][0]
__________________________________________________________________________________________________
activation_57 (Activation) (None, 64, 64, 128) 0 batch_normalization_81[0][0]
__________________________________________________________________________________________________
conv2d_transpose_6 (Conv2DTrans (None, 128, 128, 128 147584 activation_57[0][0]
__________________________________________________________________________________________________
batch_normalization_82 (BatchNo (None, 128, 128, 128 512 conv2d_transpose_6[0][0]
__________________________________________________________________________________________________
activation_58 (Activation) (None, 128, 128, 128 0 batch_normalization_82[0][0]
__________________________________________________________________________________________________
conv2d_118 (Conv2D) (None, 128, 128, 128 16512 activation_58[0][0]
__________________________________________________________________________________________________
batch_normalization_83 (BatchNo (None, 128, 128, 128 512 conv2d_118[0][0]
__________________________________________________________________________________________________
activation_59 (Activation) (None, 128, 128, 128 0 batch_normalization_83[0][0]
__________________________________________________________________________________________________
add_38 (Add) (None, 128, 128, 128 0 activation_59[0][0]
activation_40[0][0]
__________________________________________________________________________________________________
conv2d_119 (Conv2D) (None, 128, 128, 64) 8256 add_38[0][0]
__________________________________________________________________________________________________
batch_normalization_84 (BatchNo (None, 128, 128, 64) 256 conv2d_119[0][0]
__________________________________________________________________________________________________
activation_60 (Activation) (None, 128, 128, 64) 0 batch_normalization_84[0][0]
__________________________________________________________________________________________________
conv2d_transpose_7 (Conv2DTrans (None, 256, 256, 64) 36928 activation_60[0][0]
__________________________________________________________________________________________________
batch_normalization_85 (BatchNo (None, 256, 256, 64) 256 conv2d_transpose_7[0][0]
__________________________________________________________________________________________________
activation_61 (Activation) (None, 256, 256, 64) 0 batch_normalization_85[0][0]
__________________________________________________________________________________________________
conv2d_120 (Conv2D) (None, 256, 256, 64) 4160 activation_61[0][0]
__________________________________________________________________________________________________
batch_normalization_86 (BatchNo (None, 256, 256, 64) 256 conv2d_120[0][0]
__________________________________________________________________________________________________
activation_62 (Activation) (None, 256, 256, 64) 0 batch_normalization_86[0][0]
__________________________________________________________________________________________________
add_39 (Add) (None, 256, 256, 64) 0 activation_62[0][0]
activation_36[0][0]
__________________________________________________________________________________________________
conv2d_121 (Conv2D) (None, 256, 256, 64) 4160 add_39[0][0]
__________________________________________________________________________________________________
batch_normalization_87 (BatchNo (None, 256, 256, 64) 256 conv2d_121[0][0]
__________________________________________________________________________________________________
activation_63 (Activation) (None, 256, 256, 64) 0 batch_normalization_87[0][0]
__________________________________________________________________________________________________
conv2d_transpose_8 (Conv2DTrans (None, 512, 512, 64) 36928 activation_63[0][0]
__________________________________________________________________________________________________
batch_normalization_88 (BatchNo (None, 512, 512, 64) 256 conv2d_transpose_8[0][0]
__________________________________________________________________________________________________
activation_64 (Activation) (None, 512, 512, 64) 0 batch_normalization_88[0][0]
__________________________________________________________________________________________________
conv2d_122 (Conv2D) (None, 512, 512, 64) 4160 activation_64[0][0]
__________________________________________________________________________________________________
batch_normalization_89 (BatchNo (None, 512, 512, 64) 256 conv2d_122[0][0]
__________________________________________________________________________________________________
activation_65 (Activation) (None, 512, 512, 64) 0 batch_normalization_89[0][0]
__________________________________________________________________________________________________
conv2d_transpose_9 (Conv2DTrans (None, 512, 512, 32) 18464 activation_65[0][0]
__________________________________________________________________________________________________
conv2d_123 (Conv2D) (None, 512, 512, 1) 33 conv2d_transpose_9[0][0]
==================================================================================================
Total params: 32,764,801
Trainable params: 32,746,497
Non-trainable params: 18,304
__________________________________________________________________________________________________
结构图:

浙公网安备 33010602011771号