
Commit

add np_mAP
conan7882 committed Jan 5, 2019
1 parent 8b4b3de commit d40c25b
Showing 25 changed files with 580 additions and 267 deletions.
Binary file added data/yolo/000005.jpg
Binary file added data/yolo/000019.jpg
Binary file added data/yolo/000200.jpg
Binary file added data/yolo/0b178813be36410eda8742abe2153ced.jpg
Binary file added data/yolo/COCO_train2014_000000000049.jpg
Binary file added data/yolo/COCO_train2014_000000477442.jpg
Binary file added data/yolo/birds.jpg
Binary file added data/yolo/cat_and_dog.jpg
4 changes: 4 additions & 0 deletions experiment/cluster_bbox.py
@@ -34,6 +34,10 @@ def kmeans_clustering(xml_dir, k):
    avg_iou = kmeans_bbox.mean_iou()
    print(centroid)
    print(avg_iou)
    def bbox_area(bbox):
        return bbox[2] * bbox[3]
    sorted_centroid = sorted(centroid, key=bbox_area)
    print(list(np.reshape([[b[2], b[3]] for b in sorted_centroid], -1)))

def get_args():
parser = argparse.ArgumentParser()
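
Note: the added lines sort the k-means centroids by box area and print the flattened width/height pairs, which is the format the anchors entry of voc.cfg (below) expects. For context, a minimal NumPy sketch of IoU-based k-means on (w, h) pairs follows; the helper names iou_wh and kmeans_anchors are illustrative and not the repo's kmeans_bbox implementation, which clusters full 4-element boxes.

import numpy as np

def iou_wh(boxes, centroids):
    # IoU between (w, h) pairs, treating all boxes as sharing a top-left corner.
    inter = (np.minimum(boxes[:, None, 0], centroids[None, :, 0])
             * np.minimum(boxes[:, None, 1], centroids[None, :, 1]))
    union = (boxes[:, None, 0] * boxes[:, None, 1]
             + centroids[None, :, 0] * centroids[None, :, 1] - inter)
    return inter / union

def kmeans_anchors(wh, k, n_iter=100):
    # wh: (N, 2) array of ground-truth box widths and heights.
    centroids = wh[np.random.choice(len(wh), k, replace=False)]
    for _ in range(n_iter):
        assign = np.argmax(iou_wh(wh, centroids), axis=1)  # distance = 1 - IoU
        centroids = np.array([wh[assign == i].mean(axis=0) if np.any(assign == i)
                              else centroids[i] for i in range(k)])
    # Sort by area, matching the ordering printed in cluster_bbox.py above.
    return sorted(centroids.tolist(), key=lambda c: c[0] * c[1])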
7 changes: 7 additions & 0 deletions experiment/configs/parsecfg.py
@@ -8,6 +8,12 @@
import numpy as np


class Config(object):
    def __init__(self):
        pass



def parse_cfg(file_name):
    # section_counters = defaultdict(int)
    cfg_stream = open(file_name)
@@ -28,6 +34,7 @@ def parse_cfg(file_name):
cfg_dict['obj_score_thr'] = float(cfg_parser[section]['obj_score_thresh'])
cfg_dict['nms_iou_thr'] = float(cfg_parser[section]['nms_iou_thresh'])
cfg_dict['n_class'] = int(cfg_parser[section]['classes'])
cfg_dict['ignore_thr'] = float(cfg_parser[section]['ignore_thr'])

anchors = list(map(float, (cfg_parser[section]['anchors']).split(',')))
anchor_mask = list(map(int, (cfg_parser[section]['anchor_mask']).split(',')))
5 changes: 3 additions & 2 deletions experiment/configs/voc.cfg
@@ -8,7 +8,8 @@ multiscale = 320, 352, 384, 416, 448, 480, 512, 544, 576, 608

[yolo]
anchor_mask = 2, 2, 2, 1, 1, 1, 0, 0, 0
anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
anchors = 20.0, 26.0, 31.0, 67.0, 59.0, 42.0, 57.0, 115.0, 112.0, 91.0, 107.0, 191.0, 244.0, 142.0, 192.0, 280.0, 373.0, 298.0
classes=20
obj_score_thresh=0.8
nms_iou_thresh=0.45
nms_iou_thresh=0.45
ignore_thr=0.5
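
Note: the new anchors line holds the nine VOC anchors produced by the clustering step as a flat list of width/height values, and anchor_mask assigns each of the nine to one of the three detection scales. A hedged sketch of how the [yolo] section could be read and regrouped (the option names match the parse_cfg hunk above; the grouping helper itself is illustrative and assumes voc.cfg parses cleanly with configparser):

import configparser

def load_yolo_section(cfg_path):
    parser = configparser.ConfigParser()
    parser.read(cfg_path)
    sec = parser['yolo']
    cfg = {
        'obj_score_thr': float(sec['obj_score_thresh']),
        'nms_iou_thr': float(sec['nms_iou_thresh']),
        'ignore_thr': float(sec['ignore_thr']),  # new in this commit
        'n_class': int(sec['classes']),
    }
    anchors = list(map(float, sec['anchors'].split(',')))
    anchor_mask = list(map(int, sec['anchor_mask'].split(',')))
    # Pair up the flat w,h list and group three anchors per detection scale.
    wh = [(anchors[i], anchors[i + 1]) for i in range(0, len(anchors), 2)]
    cfg['anchors'] = [[wh[i] for i, m in enumerate(anchor_mask) if m == s]
                      for s in range(max(anchor_mask) + 1)]
    return cfg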
53 changes: 31 additions & 22 deletions experiment/convert/convert_model.py
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
# File: convert_model.py
# Author: Qian Ge <[email protected]>
# Modified from
# modified from:
# https://github.com/qqwweee/keras-yolo3/blob/master/convert.py
# reference:
# https://github.com/pjreddie/darknet/blob/b13f67bfdd87434e141af532cdb5dc1b8369aa3b/src/parser.c#L958
@@ -17,20 +17,18 @@


def unique_config_sections(config_path):
"""Convert all config sections to have unique names.
Adds unique suffixes to config sections for compatibility with configparser.
""" Convert all config sections to have unique names.
Adds unique suffixes to config sections for compatibility with configparser.
"""
section_counters = defaultdict(int)
output_stream = io.StringIO()
yolo_id = 0
# prev_dim = 3
# prev_dim_dict = {}

with open(config_path) as fin:
for line in fin:
if line.startswith('['):
section = line.strip().strip('[]')
n_section = section
# out_dim = prev_dim
if section == 'yolo':
n_section = section
yolo_id += 1
@@ -50,15 +48,25 @@ def unique_config_sections(config_path):
_section = n_section + '_' + str(section_counters[n_section]-1)
else:
_section = n_section + '_' + str(section_counters[n_section])
# prev_dim_dict[_section] = prev_dim
# prev_dim = out_dim

print(_section)
line = line.replace(section, _section)
output_stream.write(line)
output_stream.seek(0)
return output_stream
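
Note: unique_config_sections exists because Darknet .cfg files repeat section names such as [convolutional], which Python's configparser rejects by default; appending a numeric suffix makes every section unique so the rewritten stream can be handed to ConfigParser.read_file(), as convert() does below. A small illustration with made-up section contents:

import configparser
import io

raw = "[convolutional]\nfilters=32\n[convolutional]\nfilters=64\n"
try:
    configparser.ConfigParser().read_string(raw)
except configparser.DuplicateSectionError as err:
    print('rejected:', err)

renamed = "[convolutional_0]\nfilters=32\n[convolutional_1]\nfilters=64\n"
parser = configparser.ConfigParser()
parser.read_file(io.StringIO(renamed))
print(parser.sections())  # ['convolutional_0', 'convolutional_1']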

def parse_conv(weights_file, cfg_parser, section, layer_dict):
    """ parse conv layer

    Args:
        weights_file (file object): file object of .weights file
        cfg_parser (ConfigParser object): ConfigParser object of .cfg file for net
        section (str): name of conv layer
        layer_dict (dictionary): dict storing layer info

    Returns:
        dict storing layer info and weights values
    """
    prev_layer_channel = layer_dict['prev_layer_channel']
    count = layer_dict['count']

@@ -69,11 +77,6 @@ def parse_conv(weights_file, cfg_parser, section, layer_dict):
activation = cfg_parser[section]['activation']
batch_normalize = 'batch_normalize' in cfg_parser[section]

# Setting weights.
# Darknet serializes convolutional weights as:
# [bias/beta, [gamma, mean, variance], conv_weights]
# prev_layer_shape = K.int_shape(prev_layer)

weights_shape = (size, size, prev_layer_channel, filters)
darknet_w_shape = (filters, weights_shape[2], size, size)
weights_size = np.product(weights_shape)
@@ -125,6 +128,16 @@ def parse_conv(weights_file, cfg_parser, section, layer_dict):
return layer_dict
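
Note: the comment removed above documented the byte layout parse_conv depends on: Darknet serializes each convolutional layer as [bias/beta, [gamma, mean, variance], conv_weights], with the kernel stored as (filters, in_channels, size, size). A simplified, hedged sketch of reading one such block from an open .weights file; it assumes float32 storage and a transpose to TensorFlow's (h, w, in, out) order, while parse_conv's exact bookkeeping differs:

import numpy as np

def read_conv_block(weights_file, filters, in_channels, size, batch_normalize):
    # Per conv layer Darknet stores: bias (acts as beta when BN is used), then,
    # if BN, gamma, rolling mean and rolling variance, then the kernel values.
    bias = np.fromfile(weights_file, dtype=np.float32, count=filters)
    bn_params = None
    if batch_normalize:
        gamma = np.fromfile(weights_file, dtype=np.float32, count=filters)
        mean = np.fromfile(weights_file, dtype=np.float32, count=filters)
        var = np.fromfile(weights_file, dtype=np.float32, count=filters)
        bn_params = [gamma, bias, mean, var]
    kernel = np.fromfile(weights_file, dtype=np.float32,
                         count=filters * in_channels * size * size)
    kernel = kernel.reshape(filters, in_channels, size, size)
    kernel = np.transpose(kernel, (2, 3, 1, 0))  # -> (size, size, in_channels, filters)
    return kernel, bias, bn_params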

def convert(weights_path, config_path, save_path):
    """ Convert a .weights file to a .npy file.
    The converted .npy file will be saved in save_path.

    Args:
        weights_path (str): path of .weights file
        config_path (str): path of configuration .cfg file
        save_path (str): path for saving .npy file
    """
    # load weights file
    weights_file = open(weights_path, 'rb')
    major, minor, revision = np.ndarray(
        shape=(3, ), dtype='int32', buffer=weights_file.read(12))
@@ -134,7 +147,7 @@ def convert(weights_path, config_path, save_path):
    else:
        seen = np.ndarray(shape=(1,), dtype='int32', buffer=weights_file.read(4))
    print('Weights Header: ', major, minor, revision, seen)

    # parse net configuration
    net_config = unique_config_sections(config_path)
    cfg_parser = configparser.ConfigParser()
    cfg_parser.read_file(net_config)
@@ -144,29 +157,23 @@ def convert(weights_path, config_path, save_path):
dim_list = []
layer_dict['prev_layer_channel'] = 3
layer_dict['count'] = 0
# layer_id = 0
for section in cfg_parser.sections():
print('Parsing section {}'.format(section))
if section.startswith('conv'):
save_weight_dict[section] = {}

layer_dict = parse_conv(weights_file, cfg_parser, section, layer_dict)
save_weight_dict[section]['weights'] = layer_dict['conv_weights']

if len(layer_dict['bn_weight_list']) > 0:
save_weight_dict[section]['bn'] = layer_dict['bn_weight_list']
if len(layer_dict['conv_bias']) > 0:
save_weight_dict[section]['biases'] = layer_dict['conv_bias']
elif section.startswith('route'):
route_layers = list(map(int, (cfg_parser[section]['layers']).split(',')))
layer_dict['prev_layer_channel'] = sum([dim_list[layer_] for layer_ in route_layers])
# print(route_layers)
dim_list.append(layer_dict['prev_layer_channel'])
# layer_id += 1
remaining_weights = len(weights_file.read()) / 4
print('Load {} of {} from weights.'.format(layer_dict['count'], remaining_weights + layer_dict['count']))
weights_file.close()
# print(dim_list)
np.save(save_path, save_weight_dict)

def get_args():
@@ -192,20 +199,22 @@ def get_args():
weights_dir = FLAGS.weights_dir
save_dir = FLAGS.save_dir


FLAGS = get_args()
if FLAGS.model == 'darknet':
# convert Darknet53 for classification
config_path = 'darknet53.cfg'
weights_path = os.path.join(weights_dir, 'darknet53_448.weights')
save_path = os.path.join(save_dir, 'darknet53_448.npy')
elif FLAGS.model == 'yolov3_feat':
# convert Darknet53 first 52 conv layers for feature extraction in yolov3
config_path = 'yolov3_feat.cfg'
weights_path = os.path.join(weights_dir, 'yolov3.weights')
save_path = os.path.join(save_dir, 'yolov3_feat.npy')
elif FLAGS.model == 'yolo':
# convert yolov3 trained on COCO dataset
config_path = 'yolov3.cfg'
weights_path = os.path.join(weights_dir, 'yolov3.weights')
save_path = os.path.join(save_dir, 'yolov3.npy')

convert(weights_path, config_path, save_path)
# unique_config_sections(config_path)
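
Note: convert() collects everything into a nested dict keyed by section name, with 'weights' plus either 'bn' or 'biases' per conv layer, and writes it with np.save. Loading the resulting object-typed .npy back presumably looks like the following (allow_pickle is required on recent NumPy; the file name is illustrative):

import numpy as np

weights = np.load('yolov3.npy', allow_pickle=True).item()
for name, layer in weights.items():
    print(name, sorted(layer.keys()))  # e.g. ['bn', 'weights'] or ['biases', 'weights']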

12 changes: 8 additions & 4 deletions experiment/darknet.py
@@ -34,14 +34,18 @@ def get_args():

def predict():
    FLAGS = get_args()
    # load class id and the corresponding class name
    label_dict = loader.load_imagenet1k_label_darknet()
    # Create a Dataflow object for test images
    # create a Dataflow object for test images
    image_data = loader.read_image(
        im_name=FLAGS.im_name, n_channel=3,
        data_dir=FLAGS.data_dir, batch_size=1, rescale=FLAGS.rescale)
        data_dir=FLAGS.data_dir, batch_size=1,
        rescale=FLAGS.rescale)

    # create test model
    test_model = DarkNet53(
        n_channel=3, n_class=1000, pre_trained_path=FLAGS.pretrained_path, trainable=False)
        n_channel=3, n_class=1000,
        pre_trained_path=FLAGS.pretrained_path, trainable=False)
    test_model.create_valid_model()

    with tf.Session() as sess:
@@ -63,4 +67,4 @@ def predict():


if __name__ == '__main__':
predict()
predict()
25 changes: 24 additions & 1 deletion experiment/test.py
@@ -15,8 +15,10 @@
import src.bbox.bboxgt as bboxgt
import src.utils.viz as viz
import src.utils.image as image
import src.evaluate.np_eval as np_eval

if __name__ == "__main__":

def test_target_anchor():
    pathconfig = parscfg.parse_cfg('configs/{}_path.cfg'.format(platform.node()))
    pretrained_path = pathconfig['coco_pretrained_npy_path']
    data_dir = pathconfig['test_image_path']
@@ -86,4 +88,25 @@
viz.draw_bounding_box(o_im, gt_bbox_para, label_list=None, box_type='xyxy')
viz.draw_bounding_box(rescale_im, target_anchor_batch[0], label_list=None, box_type='xyxy')

def test_mAP():
    pred_bboxes = [[25, 35, 45, 55], [35, 45, 55, 65], [250, 350, 450, 550],
                   [250, 350, 450, 550], [250, 350, 450, 550], [45, 65, 55, 75],
                   [15, 25, 35, 45], [250, 350, 450, 550], [250, 350, 450, 550],
                   [35, 25, 55, 45], [15, 25, 35, 45]]
    gt_bboxes = [[25, 35, 45, 55], [35, 45, 55, 65], [45, 65, 55, 75], [15, 25, 35, 45],
                 [35, 25, 55, 45], [45, 65, 55, 75], [15, 25, 35, 45], [35, 25, 55, 45]]
    pred_classes = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2]
    gt_classes = [1, 1, 1, 1, 1, 2, 2, 2]
    pred_conf = [0.9, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0.9]
    IoU_thr = 0.5
    pred_im_size = 1
    gt_im_size = 1

    re = np_eval.mAP(
        pred_bboxes, pred_classes, pred_conf, gt_bboxes,
        gt_classes, IoU_thr, pred_im_size, gt_im_size)

    print(re)


if __name__ == "__main__":
    test_mAP()
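
Note: test_mAP pushes a small hand-built set of boxes through the np_eval.mAP routine added in this commit. For reference, a hedged NumPy sketch of per-class average precision from a ranked detection list (VOC-style all-point interpolation); np_eval's matching rules and signature may differ:

import numpy as np

def average_precision(scores, is_tp, n_gt):
    # scores: confidence of each detection of one class; is_tp: 1 if it matched an
    # unused ground-truth box at IoU >= threshold, else 0; n_gt: ground-truth count.
    order = np.argsort(scores)[::-1]
    tp = np.asarray(is_tp, dtype=float)[order]
    fp = 1.0 - tp
    tp_cum, fp_cum = np.cumsum(tp), np.cumsum(fp)
    recall = tp_cum / max(n_gt, 1)
    precision = tp_cum / np.maximum(tp_cum + fp_cum, 1e-12)
    mrec = np.concatenate(([0.0], recall, [1.0]))
    mpre = np.concatenate(([0.0], precision, [0.0]))
    mpre = np.maximum.accumulate(mpre[::-1])[::-1]  # make precision non-increasing
    idx = np.where(mrec[1:] != mrec[:-1])[0]
    return np.sum((mrec[idx + 1] - mrec[idx]) * mpre[idx + 1])

# mAP is then the mean of average_precision over the classes present in the ground truth.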
