# Final Experiment Note
## Single-label
### Original
| Model | No aug | Aug |
|---|---|---|
| CNN | 63.72% | 72.26% |
| AlexNet | 70.46% | 82.51% |
| VGG16 | 89.06% | 90.44% |
| ResNet50 | 81.35% | 93.02% |
| ResNet101 | 82.05% | 93.48% |
| ResNet152 | 82.05% | 93.93% |
| DenseNet121 | 85.11% | 94.17% |
| DenseNet169 | 83.25% | 94.40% |
| DenseNet201 | 84.20% | 94.87% |
### Segmentation
| Model | No aug | Aug |
|---|---|---|
| CNN | 69.54% | 80.23% |
| AlexNet | 82.79% | 89.54% |
| VGG16 | 89.06% | 92.79% |
| ResNet50 | 95.34% | 96.04% |
| ResNet101 | 95.51% | 96.27% |
| ResNet152 | 95.58% | 96.74% |
| DenseNet121 | 96.04% | 96.97% |
| DenseNet169 | 96.51% | 97.67% |
| DenseNet201 | 96.97% | 98.13% |
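Both tables compare training without augmentation against training with it. The note does not record which transforms were used, so the sketch below is only one plausible Keras setup; every transform and value in it is an assumption, not the configuration behind the "Aug" columns.
```python=
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# Hypothetical augmentation pipeline: all settings below are assumptions.
datagen = ImageDataGenerator(
    rotation_range=30,       # assumed
    horizontal_flip=True,    # assumed
    vertical_flip=True,      # assumed
    zoom_range=0.2,          # assumed
    fill_mode='nearest',
)
# train_x / train_y stand in for the pomelo training arrays:
# model.fit(datagen.flow(train_x, train_y, batch_size=32), epochs=...)
```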
---
## Multi-label
- `DenseNet_ml1`: multiple branches based on DenseNet
- `DenseNet_ml2`: 4 dense blocks + fc
- `DenseNet_ml3`: 4 dense blocks + cnn
- `DenseNet_ml4`: 3 dense blocks + fc (code at the end of this note)
- `DenseNet_ml5`: 3 dense blocks + cnn (code at the end of this note)
### PlantVillage
| Model | Precision (%) | Accuracy (%) |
|---|---|---|
| DenseNet_ml1 | 99.93 | 97.50 |
| DenseNet_ml2 | 99.61 | 99.49 |
| DenseNet_ml3 | 100 | 100 |
| DenseNet_ml4 | 99.48 | 99.49 |
| DenseNet_ml5 | 99.87 | 98.43 |
### Pomelo original
| Model | Precision | aug | 5 |
|---|---|---|---|
| DenseNet_ml1 | 91.51% | 94.19% | 92.46% |
| DenseNet_ml2 | 89.92% | 94.71% | 92.38% |
| DenseNet_ml3 | 91.43% | 93.82% | 92.05% |
| DenseNet_ml4 | 93.61% | 94.85% | 94.26% |
| DenseNet_ml5 | 92.28% | 93.72% | 93.01% |
### Segmentation
| Model | Precision |
|---|---|
| DenseNet_ml1 | 92.20% |
| DenseNet_ml2 | 93.95% |
| DenseNet_ml3 | 92.26% |
| DenseNet_ml4 | 94.18% |
| DenseNet_ml5 | 93.02% |
---
# Chapter 3 Method
```
3.1 Multi-label Learning
3.2 Multi-task Learning
3.3 ML-DenseNet
    3.3.1 DenseNet
    3.3.2 Multiple Branches
3.4 Evaluation Metrics (see the sketch after this outline)
3.5 Dataset
    3.5.1 PlantVillage
    3.5.2 Our Pomelo
        - Original data
        - Segmentation data
        - Augmentation method
```
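The note reports Precision and Accuracy without defining them. Below is a minimal sketch of one common reading for multi-label outputs; the micro-averaged precision, the exact-match accuracy, and the 0.5 threshold are all assumptions, not necessarily the definitions used in 3.4.
```python=
import numpy as np
from sklearn.metrics import precision_score, accuracy_score

def evaluate_multilabel(y_true, y_prob, threshold=0.5):
    """Micro precision and exact-match accuracy over binary indicator labels."""
    y_pred = (np.asarray(y_prob) >= threshold).astype(int)
    precision = precision_score(y_true, y_pred, average='micro', zero_division=0)
    accuracy = accuracy_score(y_true, y_pred)   # every label must match
    return precision, accuracy
```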
# Chapter 4 Experiment
```
4.1 Experimental Setup
    - System
    - Hyper-parameters (placeholder sketch after this outline)
4.2 Single-label
    - Prepared models (CNN, AlexNet, VGG, ResNet, DenseNet)
    - Experiment 1: PlantVillage
        - Show the PlantVillage results and compare them with the current paper.
    - Experiment 2: Our Pomelo
        - 1. Original experiment (compared with aug)
        - 2. Segmentation experiment (compared with aug)
4.3 Multi-label
    - Experiment 1: PlantVillage
        - The results on PlantVillage
    - Experiment 2: Our Pomelo
        - 1. Original experiment (compared with aug)
        - 2. Segmentation experiment (compared with aug)
```
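The hyper-parameters themselves are not recorded in this note. As a placeholder, a minimal multi-label training setup for the models defined below could look like the following; the optimizer, loss, input shape, class count, and epoch count are all assumptions.
```python=
# Hypothetical setup: none of these values are the ones actually used.
model = densenet_ml4((224, 224, 3), n_classes=6, finalAct='sigmoid')
model.compile(optimizer='adam',
              loss='binary_crossentropy',    # per-label loss for multi-label
              metrics=['binary_accuracy'])
# model.fit(train_ds, validation_data=val_ds, epochs=50)
```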
```python=
# DenseNet-121 backbone (growth rate f = 32; blocks of 6/12/24/16 layers),
# built with the Keras functional API.
from tensorflow.keras import backend as K
from tensorflow.keras.layers import (Input, Conv2D, BatchNormalization, ReLU,
                                     MaxPool2D, AvgPool2D, GlobalAvgPool2D,
                                     Dense, Concatenate)
from tensorflow.keras.models import Model

def densenet(img_shape, n_classes, finalAct='softmax', f=32):
    repetitions = 6, 12, 24, 16   # layers per dense block (DenseNet-121)

    def bn_rl_conv(x, f, k=1, s=1, p='same'):
        # pre-activation unit: BN -> ReLU -> Conv
        x = BatchNormalization(epsilon=1.001e-5)(x)
        x = ReLU()(x)
        x = Conv2D(f, k, strides=s, padding=p)(x)
        return x

    def dense_block(tensor, r):
        # r layers of 1x1 bottleneck (4f filters) + 3x3 conv (f filters);
        # each layer's output is concatenated onto everything before it
        for _ in range(r):
            x = bn_rl_conv(tensor, 4 * f)
            x = bn_rl_conv(x, f, 3)
            tensor = Concatenate()([tensor, x])
        return tensor

    def transition_block(x):
        # 1x1 conv halves the channels, average pooling halves the resolution
        x = bn_rl_conv(x, K.int_shape(x)[-1] // 2)
        # x = Dropout(0.5)(x)
        x = AvgPool2D(2, strides=2, padding='same')(x)
        return x

    input = Input(img_shape)
    x = Conv2D(64, 7, strides=2, padding='same')(input)
    x = BatchNormalization(epsilon=1.001e-5)(x)
    x = ReLU()(x)
    x = MaxPool2D(3, strides=2, padding='same')(x)
    for r in repetitions:
        d = dense_block(x, r)
        x = transition_block(d)
    # the head reads from the last dense block (d); the transition built after
    # it is never connected to the output, so Keras prunes it from the graph
    x = GlobalAvgPool2D()(d)
    output = Dense(n_classes, activation=finalAct)(x)
    model = Model(input, output)
    return model
```
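Example instantiation (the input shape is an assumption; 38 is the class count commonly cited for PlantVillage):
```python=
model = densenet((224, 224, 3), n_classes=38)
model.summary()
```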
```python=
# DenseNet_ml4: shared trunk of 3 dense blocks, then one fully connected
# branch per class. Uses the same imports as densenet above.
def densenet_ml4(img_shape, n_classes, finalAct='softmax', f=32):
    # for the per-class Dense(1) outputs, finalAct should be 'sigmoid';
    # a softmax over a single unit is constant 1.0
    repetitions = 6, 12, 24   # fourth block (16) dropped in this variant
    r2 = 10                   # only used by the disabled branch variant below

    def bn_rl_conv(x, f, k=1, s=1, p='same'):
        x = BatchNormalization(epsilon=1.001e-5)(x)
        x = ReLU()(x)
        x = Conv2D(f, k, strides=s, padding=p)(x)
        return x

    def dense_block(tensor, r):
        for _ in range(r):
            x = bn_rl_conv(tensor, 4 * f)
            x = bn_rl_conv(x, f, 3)
            tensor = Concatenate()([tensor, x])
        return tensor

    def transition_block(x):
        x = bn_rl_conv(x, K.int_shape(x)[-1] // 2)
        # x = Dropout(0.3)(x)
        x = AvgPool2D(2, strides=2, padding='same')(x)
        return x

    input = Input(img_shape)
    x = Conv2D(64, 7, strides=2, padding='same')(input)
    x = BatchNormalization(epsilon=1.001e-5)(x)
    x = ReLU()(x)
    x = MaxPool2D(3, strides=2, padding='same')(x)
    for r in repetitions:
        d = dense_block(x, r)
        x = transition_block(d)
    x = GlobalAvgPool2D()(d)   # shared pooled features for every branch

    # earlier alternative (disabled): one extra dense block per class
    # outputs = []
    # for i in range(n_classes):
    #     d = dense_block(x, r2)
    #     branch = transition_block(d)
    #     branch = GlobalAvgPool2D()(d)   # reads d, so the transition is unused
    #     output = Dense(1, activation=finalAct)(branch)
    #     outputs.append(output)

    # fc branch per class; each branch starts from the shared features x,
    # so the branches stay parallel instead of chaining onto one another
    outputs = []
    for i in range(n_classes):
        b = Dense(1024, activation='relu')(x)
        b = Dense(1024, activation='relu')(b)
        # b = Dropout(0.3)(b)
        b = Dense(512, activation='relu')(b)
        output = Dense(1, activation=finalAct)(b)
        outputs.append(output)
    outputs = Concatenate()(outputs)

    model = Model(input, outputs)
    return model
```
```python=
# DenseNet_ml5: same shared trunk of 3 dense blocks, but each class gets a
# small convolutional branch instead of an fc head. Same imports as above.
def densenet_ml5(img_shape, n_classes, finalAct='softmax', f=32):
    # as in densenet_ml4, 'sigmoid' is the meaningful choice of finalAct here
    repetitions = 6, 12, 24   # fourth block (16) dropped in this variant
    r2 = 10                   # only used by the disabled branch variant below

    def bn_rl_conv(x, f, k=1, s=1, p='same'):
        x = BatchNormalization(epsilon=1.001e-5)(x)
        x = ReLU()(x)
        x = Conv2D(f, k, strides=s, padding=p)(x)
        return x

    def dense_block(tensor, r):
        for _ in range(r):
            x = bn_rl_conv(tensor, 4 * f)
            x = bn_rl_conv(x, f, 3)
            tensor = Concatenate()([tensor, x])
        return tensor

    def transition_block(x):
        x = bn_rl_conv(x, K.int_shape(x)[-1] // 2)
        # x = Dropout(0.3)(x)
        x = AvgPool2D(2, strides=2, padding='same')(x)
        return x

    input = Input(img_shape)
    x = Conv2D(64, 7, strides=2, padding='same')(input)
    x = BatchNormalization(epsilon=1.001e-5)(x)
    x = ReLU()(x)
    x = MaxPool2D(3, strides=2, padding='same')(x)
    for r in repetitions:
        d = dense_block(x, r)
        x = transition_block(d)
    # no global pooling here: the conv branches need the spatial feature map,
    # so each branch consumes x, the output of the last transition block

    # earlier alternative (disabled): one extra dense block per class
    # outputs = []
    # for i in range(n_classes):
    #     d = dense_block(x, r2)
    #     branch = transition_block(d)
    #     branch = GlobalAvgPool2D()(d)   # reads d, so the transition is unused
    #     output = Dense(1, activation=finalAct)(branch)
    #     outputs.append(output)

    # conv branch per class: two bottleneck pairs, a transition, then pooling
    outputs = []
    for i in range(n_classes):
        d = bn_rl_conv(x, f * 4)
        d = bn_rl_conv(d, f, 3)
        d = bn_rl_conv(d, f * 4)
        d = bn_rl_conv(d, f, 3)
        b = transition_block(d)
        b = GlobalAvgPool2D()(b)
        output = Dense(1, activation=finalAct)(b)
        outputs.append(output)
    outputs = Concatenate()(outputs)

    model = Model(input, outputs)
    return model
```
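In both `densenet_ml4` and `densenet_ml5` the three-block DenseNet trunk is shared and every class gets its own branch, matching the multi-task framing of 3.2 and the multiple-branch design of 3.3.2: ml4 attaches a fully connected head per class, ml5 a small convolutional branch per class. Because each branch ends in a one-unit `Dense` layer, `finalAct='sigmoid'` is the meaningful choice; a softmax over a single unit always outputs 1.0.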