add modulization
RocketFlash committed Aug 29, 2019
1 parent 7da3f9c commit 0689e76
Showing 7 changed files with 36 additions and 28 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -103,6 +103,8 @@ venv.bak/
# mypy
.mypy_cache/

# vscode
.vscode/

*DS_Store*
tf_log/
Empty file added __init__.py
Empty file.
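The empty __init__.py turns the repository folder into an importable Python package, which is what the switch to relative imports in model.py and utils.py below relies on. A minimal sketch of how the modularized code could then be consumed; the folder name SiameseNet is an assumption inferred from the 'SiameseNet/configs/road_signs.yml' path in the updated test.py, and the snippet assumes the package's parent directory is on sys.path:

# Hypothetical usage, not part of the commit: with __init__.py in place the folder
# imports as a package, so the relative imports inside model.py and utils.py resolve.
from SiameseNet.model import SiameseNet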
2 changes: 1 addition & 1 deletion configs/road_signs.yml
@@ -10,7 +10,7 @@ project_name : 'road_signs/'
freeze_backbone : True

#paths
dataset_path : '/home/rauf/plates_competition/dataset/road_signs/road_signs_separated/'
dataset_path : '/home/rauf/dataset/road_signs/road_signs_separated/'
tensorboard_log_path : 'tf_log/'
weights_save_path : 'weights/'
plots_path : 'plots/'
24 changes: 13 additions & 11 deletions model.py
@@ -7,15 +7,13 @@
import random
from keras.models import Model, load_model
from keras import optimizers
from keras.utils import plot_model
from keras.layers import Dense, Input, Lambda, Dropout, Flatten
from keras.layers import Conv2D, MaxPool2D, BatchNormalization, concatenate
from classification_models import Classifiers
import utils
import pickle
import losses_and_accuracies as lac
from utils import parse_net_params
from backbones import get_backbone
from . import losses_and_accuracies as lac
from .utils import parse_net_params, load_encodings
from .backbones import get_backbone
import matplotlib.pyplot as plt


@@ -48,11 +46,13 @@ def __init__(self, cfg_file):
self.l_model = []

self.encodings_path = params['encodings_path']
self.configs_path = params['configs_path']
self.plots_path = params['plots_path']
self.tensorboard_log_path = params['tensorboard_log_path']
self.weights_save_path = params['weights_save_path']

os.makedirs(self.encodings_path, exist_ok=True)
os.makedirs(self.configs_path, exist_ok=True)
os.makedirs(self.plots_path, exist_ok=True)
os.makedirs(self.tensorboard_log_path, exist_ok=True)
os.makedirs(self.weights_save_path, exist_ok=True)
@@ -105,7 +105,6 @@ def _create_model_siamese(self):
self.model = Model(
inputs=[input_image_1, input_image_2], outputs=prediction)

plot_model(self.model, to_file='{}model.png'.format(self.plots_path))
print('Base model summary')
self.base_model.summary()

@@ -131,7 +130,6 @@ def _create_model_triplet(self):
self.model = Model(inputs=[input_image_a,input_image_p, input_image_n],
outputs=merged_vector)

plot_model(self.model, to_file='{}model.png'.format(self.plots_path))
print('Base model summary')
self.base_model.summary()

@@ -215,7 +213,7 @@ def generate_encodings(self, save_file_name='encodings.pkl', max_num_samples_of_
f.close()

def load_encodings(self, path_to_encodings):
self.encoded_training_data = utils.load_encodings(path_to_encodings)
self.encoded_training_data = load_encodings(path_to_encodings)

def load_model(self,file_path):
self.model = load_model(file_path,
@@ -224,16 +222,20 @@ def load_model(self,file_path):
'triplet_loss': lac.triplet_loss})
self.base_model = Model(inputs=[self.model.layers[3].get_input_at(0)],
outputs=[self.model.layers[3].layers[-1].output])
self.base_model._make_predict_function()

def calculate_distances(self, encoding):
training_encodings = self.encoded_training_data['encodings']
return np.sqrt(
np.sum((training_encodings - np.array(encoding))**2, axis=1))

def predict(self, image_path):
img = cv2.imread(image_path)
def predict(self, image):
if type(image) is str:
img = cv2.imread(image)
else:
img = image
img = cv2.resize(img, (self.input_shape[0], self.input_shape[1]))

print(img.shape)
encoding = self.base_model.predict(np.expand_dims(img, axis=0))
distances = self.calculate_distances(encoding)
max_element = np.argmin(distances)
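The reworked predict() now accepts either a path to an image on disk or an image array that is already in memory. A hedged usage sketch mirroring the calls in test.py; 'query.png' is a placeholder file name, not a path from the repository:

import cv2
from SiameseNet.model import SiameseNet

# Build the network and restore weights and encodings as test.py does, then query it.
model = SiameseNet('SiameseNet/configs/road_signs.yml')
model.load_model('{}best_model_4.h5'.format(model.weights_save_path))
model.load_encodings('{}encodings.pkl'.format(model.encodings_path))

result_from_path = model.predict('query.png')               # str: read with cv2.imread inside predict
result_from_array = model.predict(cv2.imread('query.png'))  # ndarray: used as passed in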
6 changes: 6 additions & 0 deletions requirements.txt
@@ -0,0 +1,6 @@
image-classifiers
keras
tensorflow-gpu
matplotlib
albumentations
pydot
11 changes: 1 addition & 10 deletions test.py
@@ -1,15 +1,6 @@
import keras.backend as K
from model import SiameseNet
from data_loader import SiameseImageLoader
import matplotlib.pyplot as plt
from keras import optimizers
from keras.models import Model, load_model
import albumentations as A
import cv2
import time


model = SiameseNet('configs/road_signs.yml')
model = SiameseNet('SiameseNet/configs/road_signs.yml')
model.load_model('{}best_model_4.h5'.format(model.weights_save_path))
model.load_encodings('{}encodings.pkl'.format(model.encodings_path))

19 changes: 13 additions & 6 deletions utils.py
@@ -5,8 +5,8 @@
from matplotlib import pyplot as plt
import yaml
from keras import optimizers
from augmentations import get_aug
from data_loader import SiameseImageLoader
from .augmentations import get_aug
from .data_loader import SiameseImageLoader


def load_encodings(path_to_encodings):
@@ -96,13 +96,20 @@ def parse_net_params(filename='configs/road_signs.yml'):
augmentations = None

params = {k: v for k, v in cfg.items() if k not in ['optimizer']}
params['encodings_path'] = os.path.join(cfg['encodings_path'],

current_directory = os.path.dirname(os.path.abspath(__file__))
print(current_directory)
params['encodings_path'] = os.path.join(current_directory,
cfg['encodings_path'],
cfg['project_name'])
params['plots_path'] = os.path.join(cfg['plots_path'],
params['plots_path'] = os.path.join(current_directory,
cfg['plots_path'],
cfg['project_name'])
params['tensorboard_log_path'] = os.path.join(cfg['tensorboard_log_path'],
params['tensorboard_log_path'] = os.path.join(current_directory,
cfg['tensorboard_log_path'],
cfg['project_name'])
params['weights_save_path'] = os.path.join(cfg['weights_save_path'],
params['weights_save_path'] = os.path.join(current_directory,
cfg['weights_save_path'],
cfg['project_name'])

if 'dataset_path' in cfg:
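With this change parse_net_params resolves the output directories against the location of the module file instead of the current working directory, so encodings, plots, logs and weights land inside the package no matter where the calling script is launched from. A minimal sketch of the idea, reusing the tensorboard_log_path ('tf_log/') and project_name ('road_signs/') values from configs/road_signs.yml; it illustrates the behaviour and is not code from the commit:

import os

# Anchor a config-relative directory to this file's location rather than os.getcwd(),
# as the updated parse_net_params does for encodings, plots, logs and weights.
current_directory = os.path.dirname(os.path.abspath(__file__))
tensorboard_log_path = os.path.join(current_directory, 'tf_log/', 'road_signs/')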
