Commit 48a9859e authored by Gabriel Kirsten's avatar Gabriel Kirsten
Browse files

refactoring
parent ba271307
......@@ -37,8 +37,6 @@ from util.file_utils import File
from util.utils import TimeUtils
logger = logging.getLogger('PIL')
logger.setLevel(logging.WARNING)
......@@ -51,6 +49,7 @@ START_TIME = time.time()
IMG_WIDTH, IMG_HEIGHT = 256, 256
weight_path = None
class CNNKeras(Classifier):
""" Class for CNN classifiers based on Keras applications """
......@@ -118,17 +117,26 @@ class CNNKeras(Classifier):
configs : OrderedDict
New configs of classifier.
"""
self.architecture = Config.nvl_config(configs["Architecture"], self.architecture)
self.learning_rate = Config.nvl_config(configs["Learning rate"], self.learning_rate)
self.architecture = Config.nvl_config(
configs["Architecture"], self.architecture)
self.learning_rate = Config.nvl_config(
configs["Learning rate"], self.learning_rate)
self.momentum = Config.nvl_config(configs["Momentum"], self.momentum)
self.batch_size = Config.nvl_config(configs["Batch size"], self.batch_size)
self.batch_size = Config.nvl_config(
configs["Batch size"], self.batch_size)
self.epochs = Config.nvl_config(configs["Epochs"], self.epochs)
self.fine_tuning_rate = Config.nvl_config(configs["Fine Tuning rate"], self.fine_tuning_rate)
self.transfer_learning = Config.nvl_config(configs["Transfer Learning"], self.transfer_learning)
self.save_weights = Config.nvl_config(configs["Save weights"], self.save_weights)
self.perc_train = Config.nvl_config(configs["Perc Train"], self.perc_train)
self.perc_validation = Config.nvl_config(configs["Perc Validation"], self.perc_validation)
self.recreate_dataset = Config.nvl_config(configs["Recreate Dataset"], self.recreate_dataset)
self.fine_tuning_rate = Config.nvl_config(
configs["Fine Tuning rate"], self.fine_tuning_rate)
self.transfer_learning = Config.nvl_config(
configs["Transfer Learning"], self.transfer_learning)
self.save_weights = Config.nvl_config(
configs["Save weights"], self.save_weights)
self.perc_train = Config.nvl_config(
configs["Perc Train"], self.perc_train)
self.perc_validation = Config.nvl_config(
configs["Perc Validation"], self.perc_validation)
self.recreate_dataset = Config.nvl_config(
configs["Recreate Dataset"], self.recreate_dataset)
def get_summary_config(self):
"""Return fomatted summary of configuration.
......@@ -184,15 +192,17 @@ class CNNKeras(Classifier):
for file in os.listdir(predict_directory):
print(File.make_path(predict_directory, file))
if os.path.splitext(file)[-1] == ".tif":
try:
img = Image.open(File.make_path(predict_directory, file))
#img.thumbnail(img.size)
new_file = os.path.splitext(file)[0]+".png"
img.save(File.make_path(predict_directory, 'png', new_file), "PNG", quality=100)
except Exception, e:
print e
try:
img = Image.open(File.make_path(predict_directory, file))
# img.thumbnail(img.size)
new_file = os.path.splitext(file)[0] + ".png"
img.save(File.make_path(predict_directory,
'png', new_file), "PNG", quality=100)
except Exception, e:
print e
else:
os.symlink(File.make_path(predict_directory, file), File.make_path(predict_directory, 'png', file))
os.symlink(File.make_path(predict_directory, file),
File.make_path(predict_directory, 'png', file))
classify_datagen = ImageDataGenerator()
......@@ -204,12 +214,13 @@ class CNNKeras(Classifier):
class_mode=None)
try:
#self.model.load_weights(
#"../models_checkpoints/" + self.file_name + ".h5")
# self.model.load_weights(
#"../models_checkpoints/" + self.file_name + ".h5")
K.clear_session()
if self.weight_path is not None:
self.model = load_model(self.weight_path)
path_classes = self.weight_path.replace("_model.h5", "_classes.npy")
path_classes = self.weight_path.replace(
"_model.h5", "_classes.npy")
CLASS_NAMES = np.load(path_classes).item().keys()
except Exception, e:
raise IException("Can't load the model in " +
......@@ -256,24 +267,25 @@ class CNNKeras(Classifier):
File.remove_dir(File.make_path(dataset, ".tmp"))
train_generator, validation_generator, test_generator = self.make_dataset(dataset)
train_generator, validation_generator, test_generator = self.make_dataset(
dataset)
# Save the model according to the conditions
if self.save_weights:
if not os.path.exists("../models_checkpoints/"):
os.makedirs("../models_checkpoints/")
checkpoint = ModelCheckpoint("../models_checkpoints/" + self.file_name + ".h5", monitor='val_acc',
verbose=1, save_best_only=True, save_weights_only=False,
mode='auto', period=1)
verbose=1, save_best_only=True, save_weights_only=False,
mode='auto', period=1)
else:
checkpoint = None
self.model = self.select_model_params(train_generator.num_classes)
tensorboard = TensorBoard(log_dir="../models_checkpoints/logs_" + self.file_name, write_images=False)
#tensorboard.set_model(self.model)
tensorboard = TensorBoard(
log_dir="../models_checkpoints/logs_" + self.file_name, write_images=False)
# tensorboard.set_model(self.model)
# compile the model
self.model.compile(loss="categorical_crossentropy",
optimizer=optimizers.SGD(
......@@ -290,15 +302,15 @@ class CNNKeras(Classifier):
validation_steps=validation_generator.samples // self.batch_size.value)
if self.save_weights:
#self.model.save_weights(
# self.model.save_weights(
# "../models_checkpoints/" + self.file_name + ".h5")
self.model.save(
"../models_checkpoints/" + self.file_name + "_model.h5")
self.weight_path = "../models_checkpoints/" + self.file_name + "_model.h5"
dict_classes = validation_generator.class_indices
np.save("../models_checkpoints/" + self.file_name + "_classes.npy", dict_classes)
np.save("../models_checkpoints/" + self.file_name +
"_classes.npy", dict_classes)
def must_train(self):
"""Return if classifier must be trained.
......@@ -389,7 +401,6 @@ class CNNKeras(Classifier):
PERC_TRAIN = self.perc_train.value
PERC_VALIDATION = self.perc_validation.value
# create keras dir dataset
if not os.path.exists(File.make_path(dataset, KERAS_DATASET_DIR_NAME)) or self.recreate_dataset.value:
if os.path.exists(File.make_path(dataset, KERAS_DATASET_DIR_NAME)):
......@@ -426,66 +437,75 @@ class CNNKeras(Classifier):
quant_validation = int((quant_files / 100.0) * PERC_VALIDATION)
files_train = files[0:quant_train]
files_validation = files[quant_train:quant_train+quant_validation]
files_test = files[quant_train+quant_validation:quant_files]
print("Processing class %s - %d itens - %d train items - %d validation items" % (dir_class, quant_files, quant_train, quant_validation))
files_validation = files[quant_train:quant_train +
quant_validation]
files_test = files[quant_train + quant_validation:quant_files]
print("Processing class %s - %d itens - %d train items - %d validation items" %
(dir_class, quant_files, quant_train, quant_validation))
for file in files_train:
dir_class_train = File.make_path(dataset, KERAS_DATASET_DIR_NAME, KERAS_DIR_TRAIN_NAME, dir_class)
dir_class_train = File.make_path(
dataset, KERAS_DATASET_DIR_NAME, KERAS_DIR_TRAIN_NAME, dir_class)
if not os.path.exists(dir_class_train):
os.makedirs(dir_class_train)
if os.path.splitext(file)[-1] == ".tif":
img = Image.open(File.make_path(root, file))
#img.thumbnail(img.size)
new_file = os.path.splitext(file)[0]+".png"
img.save(File.make_path(dir_class_train, new_file), "PNG", quality=100)
# img.thumbnail(img.size)
new_file = os.path.splitext(file)[0] + ".png"
img.save(File.make_path(dir_class_train,
new_file), "PNG", quality=100)
else:
os.symlink(File.make_path(root, file), File.make_path(dir_class_train, file))
os.symlink(File.make_path(root, file),
File.make_path(dir_class_train, file))
for file in files_validation:
dir_class_validation = File.make_path(dataset, KERAS_DATASET_DIR_NAME, KERAS_DIR_VALIDATION_NAME, dir_class)
dir_class_validation = File.make_path(
dataset, KERAS_DATASET_DIR_NAME, KERAS_DIR_VALIDATION_NAME, dir_class)
if not os.path.exists(dir_class_validation):
os.makedirs(dir_class_validation)
os.makedirs(dir_class_validation)
if os.path.splitext(file)[-1] == ".tif":
img = Image.open(File.make_path(root, file))
#img.thumbnail(img.size)
new_file = os.path.splitext(file)[0]+".png"
img.save(File.make_path(dir_class_validation, new_file), "PNG", quality=100)
# img.thumbnail(img.size)
new_file = os.path.splitext(file)[0] + ".png"
img.save(File.make_path(dir_class_validation,
new_file), "PNG", quality=100)
else:
os.symlink(File.make_path(root, file), File.make_path(dir_class_validation, file))
os.symlink(File.make_path(root, file),
File.make_path(dir_class_validation, file))
for file in files_test:
dir_class_test = File.make_path(dataset, KERAS_DATASET_DIR_NAME, KERAS_DIR_TEST_NAME, dir_class)
dir_class_test = File.make_path(
dataset, KERAS_DATASET_DIR_NAME, KERAS_DIR_TEST_NAME, dir_class)
if not os.path.exists(dir_class_test):
os.makedirs(dir_class_test)
os.makedirs(dir_class_test)
if os.path.splitext(file)[-1] == ".tif":
img = Image.open(File.make_path(root, file))
#img.thumbnail(img.size)
new_file = os.path.splitext(file)[0]+".png"
img.save(File.make_path(dir_class_test, new_file), "PNG", quality=100)
# img.thumbnail(img.size)
new_file = os.path.splitext(file)[0] + ".png"
img.save(File.make_path(dir_class_test,
new_file), "PNG", quality=100)
else:
os.symlink(File.make_path(root, file), File.make_path(dir_class_test, file))
os.symlink(File.make_path(root, file),
File.make_path(dir_class_test, file))
train_datagen = ImageDataGenerator()
train_generator = train_datagen.flow_from_directory(
File.make_path(dataset, KERAS_DATASET_DIR_NAME, KERAS_DIR_TRAIN_NAME),
File.make_path(dataset, KERAS_DATASET_DIR_NAME,
KERAS_DIR_TRAIN_NAME),
target_size=(IMG_HEIGHT, IMG_WIDTH),
batch_size=self.batch_size.value,
shuffle=True,
class_mode="categorical")
validation_datagen = ImageDataGenerator()
validation_generator = validation_datagen.flow_from_directory(
File.make_path(dataset, KERAS_DATASET_DIR_NAME, KERAS_DIR_VALIDATION_NAME),
File.make_path(dataset, KERAS_DATASET_DIR_NAME,
KERAS_DIR_VALIDATION_NAME),
target_size=(IMG_HEIGHT, IMG_WIDTH),
batch_size=self.batch_size.value,
shuffle=True,
......@@ -494,13 +514,11 @@ class CNNKeras(Classifier):
test_datagen = ImageDataGenerator()
test_generator = test_datagen.flow_from_directory(
File.make_path(dataset, KERAS_DATASET_DIR_NAME, KERAS_DIR_TEST_NAME),
File.make_path(dataset, KERAS_DATASET_DIR_NAME,
KERAS_DIR_TEST_NAME),
target_size=(IMG_HEIGHT, IMG_WIDTH),
batch_size=self.batch_size.value,
shuffle=True,
class_mode="categorical")
return train_generator, validation_generator, test_generator
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment