-
Notifications
You must be signed in to change notification settings - Fork 54
/
layers.py
executable file
·131 lines (109 loc) · 4.47 KB
/
layers.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
import tensorflow as tf
import tensorflow.keras.layers as layers
from tensorflow.keras import Sequential
def create_vgg16_layers():
    """ Build the two VGG16-based feature extractors used by SSD.

    Returns:
        vgg16_conv4: Model mapping an RGB image (H, W, 3) through the
            13 VGG16 conv layers (through conv5_3, before the 5th pool)
        vgg16_conv7: Model mapping a 512-channel feature map through the
            modified 5th pool plus the fc6/fc7 replacement convs
    """
    def conv3(filters):
        # 3x3 same-padded ReLU conv — the standard VGG16 building block
        return layers.Conv2D(filters, 3, padding='same', activation='relu')

    def pool():
        # 2x2 stride-2 max pool; 'same' padding so odd sizes round up
        return layers.MaxPool2D(2, 2, padding='same')

    conv4_stack = [
        conv3(64), conv3(64), pool(),
        conv3(128), conv3(128), pool(),
        conv3(256), conv3(256), conv3(256), pool(),
        conv3(512), conv3(512), conv3(512), pool(),
        conv3(512), conv3(512), conv3(512),
    ]
    image_in = layers.Input(shape=[None, None, 3])
    feat = image_in
    for block_layer in conv4_stack:
        feat = block_layer(feat)
    vgg16_conv4 = tf.keras.Model(image_in, feat)

    conv7_stack = [
        # Difference from original VGG16:
        # 5th maxpool layer has kernel size = 3 and stride = 1
        layers.MaxPool2D(3, 1, padding='same'),
        # atrous conv2d for 6th block
        layers.Conv2D(1024, 3, padding='same',
                      dilation_rate=6, activation='relu'),
        layers.Conv2D(1024, 1, padding='same', activation='relu'),
    ]
    feat_in = layers.Input(shape=[None, None, 512])
    feat = feat_in
    for block_layer in conv7_stack:
        feat = block_layer(feat)
    vgg16_conv7 = tf.keras.Model(feat_in, feat)

    return vgg16_conv4, vgg16_conv7
def create_extra_layers():
    """ Create extra layers
        8th to 11th blocks

    Each block is a 1x1 "bottleneck" conv followed by a larger conv that
    shrinks the spatial size, producing progressively smaller feature
    maps for multi-scale detection.

    Returns:
        list of 5 Sequential blocks (8th through 12th)
    """
    # (bottleneck filters, output filters, kernel size, extra conv kwargs)
    # NOTE: the output-shape comments describe an expected input of
    # 19x19x1024 coming out of vgg16_conv7 — verify against the caller.
    block_specs = [
        # 8th block output shape: B, 512, 10, 10
        (256, 512, 3, {'strides': 2, 'padding': 'same'}),
        # 9th block output shape: B, 256, 5, 5
        (128, 256, 3, {'strides': 2, 'padding': 'same'}),
        # 10th block output shape: B, 256, 3, 3
        (128, 256, 3, {}),
        # 11th block output shape: B, 256, 1, 1
        (128, 256, 3, {}),
        # 12th block output shape: B, 256, 1, 1
        (128, 256, 4, {}),
    ]
    return [
        Sequential([
            layers.Conv2D(squeeze, 1, activation='relu'),
            layers.Conv2D(expand, kernel, activation='relu', **extra),
        ])
        for squeeze, expand, kernel, extra in block_specs
    ]
def create_conf_head_layers(num_classes, num_anchors=(4, 6, 6, 6, 4, 4, 4)):
    """ Create layers for classification

    One conv head per feature map; head i predicts
    num_anchors[i] * num_classes confidence scores per spatial location.

    Args:
        num_classes: number of object classes (including background)
        num_anchors: anchors per location for each feature map
            (4th, 7th, 8th, 9th, 10th, 11th, 12th blocks); the default
            matches the original hard-coded SSD configuration

    Returns:
        list of Conv2D layers, one per feature map
    """
    *mid_anchors, last_anchors = num_anchors
    # 3x3 'same' convs preserve each feature map's spatial size
    conf_head_layers = [
        layers.Conv2D(n * num_classes, kernel_size=3, padding='same')
        for n in mid_anchors
    ]
    # last feature map (12th block) is tiny, so a 1x1 kernel is used
    conf_head_layers.append(
        layers.Conv2D(last_anchors * num_classes, kernel_size=1))
    return conf_head_layers
def create_loc_head_layers(num_anchors=(4, 6, 6, 6, 4, 4, 4)):
    """ Create layers for regression

    One conv head per feature map; head i predicts num_anchors[i] * 4
    box offsets (cx, cy, w, h per anchor) per spatial location.

    Args:
        num_anchors: anchors per location for each feature map
            (4th, 7th, 8th, 9th, 10th, 11th, 12th blocks); the default
            matches the original hard-coded SSD configuration

    Returns:
        list of Conv2D layers, one per feature map
    """
    BOX_COORDS = 4  # each box is regressed with 4 offsets
    *mid_anchors, last_anchors = num_anchors
    # 3x3 'same' convs preserve each feature map's spatial size
    loc_head_layers = [
        layers.Conv2D(n * BOX_COORDS, kernel_size=3, padding='same')
        for n in mid_anchors
    ]
    # last feature map (12th block) is tiny, so a 1x1 kernel is used
    loc_head_layers.append(
        layers.Conv2D(last_anchors * BOX_COORDS, kernel_size=1))
    return loc_head_layers