Question about a U-Net implementation

```python
import tensorflow as tf
from tensorflow.keras import Model


class Encoder(Model):
    def __init__(self, config):
        super().__init__()
        # Block 1: two 3x3 convolutions (64 filters), batch norm, ReLU, 2x2 max pooling
        self.block1_conv1 = tf.keras.layers.Conv2D(64, (3, 3), name='block1_conv1', activation='relu', padding='same')
        self.block1_conv2 = tf.keras.layers.Conv2D(64, (3, 3), name='block1_conv2', padding='same')
        self.block1_bn = tf.keras.layers.BatchNormalization()
        self.block1_act = tf.keras.layers.ReLU()
        self.block1_pool = tf.keras.layers.MaxPooling2D((2, 2), strides=None, name='block1_pool')

        # Block 2: 128 filters
        self.block2_conv1 = tf.keras.layers.Conv2D(128, (3, 3), name='block2_conv1', activation='relu', padding='same')
        self.block2_conv2 = tf.keras.layers.Conv2D(128, (3, 3), name='block2_conv2', padding='same')
        self.block2_bn = tf.keras.layers.BatchNormalization()
        self.block2_act = tf.keras.layers.ReLU()
        self.block2_pool = tf.keras.layers.MaxPooling2D((2, 2), strides=None, name='block2_pool')

        # Block 3: 256 filters
        self.block3_conv1 = tf.keras.layers.Conv2D(256, (3, 3), name='block3_conv1', activation='relu', padding='same')
        self.block3_conv2 = tf.keras.layers.Conv2D(256, (3, 3), name='block3_conv2', padding='same')
        self.block3_bn = tf.keras.layers.BatchNormalization()
        self.block3_act = tf.keras.layers.ReLU()
        self.block3_pool = tf.keras.layers.MaxPooling2D((2, 2), strides=None, name='block3_pool')

        # Block 4: 512 filters, with dropout before pooling
        self.block4_conv1 = tf.keras.layers.Conv2D(512, (3, 3), name='block4_conv1', activation='relu', padding='same')
        self.block4_conv2 = tf.keras.layers.Conv2D(512, (3, 3), name='block4_conv2', padding='same')
        self.block4_bn = tf.keras.layers.BatchNormalization()
        self.block4_act = tf.keras.layers.ReLU()
        self.block4_dropout = tf.keras.layers.Dropout(0.5)
        self.block4_pool = tf.keras.layers.MaxPooling2D((2, 2), strides=None, name='block4_pool')

        # Block 5 (bottleneck): 1024 filters, with dropout, no pooling
        self.block5_conv1 = tf.keras.layers.Conv2D(1024, (3, 3), name='block5_conv1', activation='relu', padding='same')
        self.block5_conv2 = tf.keras.layers.Conv2D(1024, (3, 3), name='block5_conv2', padding='same')
        self.block5_bn = tf.keras.layers.BatchNormalization()
        self.block5_act = tf.keras.layers.ReLU()
        self.block5_dropout = tf.keras.layers.Dropout(0.5)

    def call(self, x):
        # Block 1
        z1 = self.block1_conv1(x)
        z1 = self.block1_conv2(z1)
        z1 = self.block1_bn(z1)
        z1 = self.block1_act(z1)
        z1_pool = self.block1_pool(z1)

        # Block 2
        z2 = self.block2_conv1(z1_pool)
        z2 = self.block2_conv2(z2)
        z2 = self.block2_bn(z2)
        z2 = self.block2_act(z2)
        z2_pool = self.block2_pool(z2)

        # Block 3
        z3 = self.block3_conv1(z2_pool)
        z3 = self.block3_conv2(z3)
        z3 = self.block3_bn(z3)
        z3 = self.block3_act(z3)
        z3_pool = self.block3_pool(z3)

        # Block 4
        z4 = self.block4_conv1(z3_pool)
        z4 = self.block4_conv2(z4)
        z4 = self.block4_bn(z4)
        z4 = self.block4_act(z4)
        z4_dropout = self.block4_dropout(z4)
        z4_pool = self.block4_pool(z4_dropout)

        # Block 5 (bottleneck)
        z5 = self.block5_conv1(z4_pool)
        z5 = self.block5_conv2(z5)
        z5 = self.block5_bn(z5)
        z5 = self.block5_act(z5)
        z5_dropout = self.block5_dropout(z5)

        # Return the pre-pooling activations of blocks 1-4 (the decoder's skip
        # connections) and the bottleneck output.
        return z1, z2, z3, z4_dropout, z5_dropout
```
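As a minimal sketch of how this encoder behaves, the snippet below instantiates it and runs a zero tensor through it to check the feature-map shapes. The 256x256 RGB input size is an assumption for illustration (any size divisible by 16 works with the 2x2 poolings), and `config=None` is passed only because `config` is unused in `__init__` above.

```python
# Smoke test: assumed 256x256 RGB input; `config` is unused, so None is passed.
import tensorflow as tf

encoder = Encoder(config=None)
dummy = tf.zeros((1, 256, 256, 3))
z1, z2, z3, z4, z5 = encoder(dummy)

# With 'same' padding and 2x2 pooling between blocks, the expected shapes are:
# z1: (1, 256, 256, 64)   z2: (1, 128, 128, 128)  z3: (1, 64, 64, 256)
# z4: (1, 32, 32, 512)    z5: (1, 16, 16, 1024)
print([t.shape for t in (z1, z2, z3, z4, z5)])
```

Note that `z1`-`z4` are the activations taken before pooling; a U-Net decoder would concatenate them with the upsampled features at the matching resolutions.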
