@@ -6,12 +6,12 @@
 import configparser
 from keras.layers import Input
 from keras import backend as K
+from keras.optimizers import Adam
 from keras.utils import plot_model
 from tools.data_tools import DataSequence
+from tools.loss_metrics_tools import mean_iou
 from tools.callbacks import PredictionsCallback
-from keras.optimizers import SGD, RMSprop, Adam
 from tools.tiramisu_model import get_tiramisu_model
-from tools.loss_metrics_tools import mean_iou, focal_loss
 from tools.plotting_tools import plot_history, plot_feature_label_prediction
 from keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint

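The loss/metric imports are trimmed to the one metric still in use. The implementation of `mean_iou` is not part of this diff; purely for orientation, a minimal Keras-compatible mean-IoU metric for one-hot labels could look like the sketch below. The `num_classes` default and all internals are assumptions, not the actual code in `tools.loss_metrics_tools`:

```python
from keras import backend as K

def mean_iou(y_true, y_pred, num_classes=2):
    """Hypothetical sketch of a mean-IoU metric for one-hot labels;
    the real implementation in tools.loss_metrics_tools may differ."""
    y_true_ids = K.argmax(y_true, axis=-1)
    y_pred_ids = K.argmax(y_pred, axis=-1)
    ious = []
    for c in range(num_classes):
        true_c = K.cast(K.equal(y_true_ids, c), 'float32')
        pred_c = K.cast(K.equal(y_pred_ids, c), 'float32')
        intersection = K.sum(true_c * pred_c)
        union = K.sum(true_c) + K.sum(pred_c) - intersection
        # Epsilon avoids division by zero when a class is absent
        # from both the labels and the predictions.
        ious.append((intersection + K.epsilon()) / (union + K.epsilon()))
    return sum(ious) / num_classes
```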
@@ -123,25 +123,11 @@ def main(): |
     except:
         print("Old weights couldn't be loaded successfully, will continue!")

-    learning_rate = 1.0e-6;
-    decay_rate = learning_rate/NUM_EPOCHS
-    print("Decay rate is set to {}.".format(decay_rate))
-
-    test = 1
-    if test == 1:
-        model.compile(optimizer=SGD(lr=learning_rate), loss='categorical_crossentropy', metrics=['accuracy', mean_iou])
-    elif test == 2:
-        model.compile(optimizer=SGD(lr=learning_rate, decay=decay_rate), loss='categorical_crossentropy', metrics=['accuracy', mean_iou])
-    elif test == 3:
-        model.compile(optimizer=RMSprop(lr=learning_rate), loss='categorical_crossentropy', metrics=['accuracy', mean_iou])
-    elif test == 4:
-        model.compile(optimizer=RMSprop(lr=learning_rate), loss=focal_loss(), metrics=['accuracy', mean_iou])
-    elif test == 5:
-        model.compile(optimizer=Adam(lr=learning_rate), loss='categorical_crossentropy', metrics=['accuracy', mean_iou])
-    else:
-        print("\nError: Test is not in the range.")
-        print("Exiting!\n")
-        sys.exit(1)
+    learning_rate = 1.0e-8
+    #decay_rate = learning_rate/NUM_EPOCHS
+    #print("Decay rate is set to {}.".format(decay_rate))
+
+    model.compile(optimizer=Adam(lr=learning_rate), loss='categorical_crossentropy', metrics=['accuracy', mean_iou])

     # Print model summary
     #model.summary()
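The decay schedule is commented out rather than deleted. If it were wanted again, Keras 2.x's `Adam` accepts a `decay` argument directly, so the block could be restored along these lines. This is only a sketch, reusing `NUM_EPOCHS`, `model`, and `mean_iou` from the surrounding script:

```python
learning_rate = 1.0e-8
decay_rate = learning_rate / NUM_EPOCHS  # NUM_EPOCHS as used by the removed code
print("Decay rate is set to {}.".format(decay_rate))

# Per-update learning-rate decay, analogous to the removed SGD(decay=...) branch.
model.compile(optimizer=Adam(lr=learning_rate, decay=decay_rate),
              loss='categorical_crossentropy',
              metrics=['accuracy', mean_iou])
```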