Commit
add yolov3_mobilenet_v1
yuedongli1 committed Dec 18, 2023
1 parent 867bc31 commit 4878c70
Showing 15 changed files with 4,804 additions and 0 deletions.
24 changes: 24 additions & 0 deletions examples/yolov3_mobilenet_v1/configs/coco.yaml
@@ -0,0 +1,24 @@
# COCO 2017 dataset http://cocodataset.org

data:
  dataset_name: coco

  train_set: ./coco/train2017.txt  # 118287 images
  val_set: ./coco/val2017.txt  # 5000 images
  test_set: ./coco/test-dev2017.txt  # 20288 of 40670 images, submit to https://competitions.codalab.org/competitions/20794

  nc: 80

  # class names
  names: [ 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',
           'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
           'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',
           'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard',
           'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
           'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch',
           'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone',
           'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear',
           'hair drier', 'toothbrush' ]

  train_transforms: []
  test_transforms: []
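
The train_set/val_set/test_set entries point at plain-text split files, and nc should equal the number of entries in names. A minimal sanity-check sketch, assuming PyYAML is available and the usual YOLO-style convention of one image path per line in each split file (an assumption; the diff above does not show the split-file format):

# sketch: validate a dataset config like configs/coco.yaml
import yaml

with open("examples/yolov3_mobilenet_v1/configs/coco.yaml") as f:
    cfg = yaml.safe_load(f)["data"]

assert cfg["nc"] == len(cfg["names"]), "nc must match the number of class names"

# assumption: each split file lists one image path per line
with open(cfg["train_set"]) as f:
    image_paths = [line.strip() for line in f if line.strip()]
print(f"{len(image_paths)} training images listed")  # 118287 expected per the comment above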
42 changes: 42 additions & 0 deletions examples/yolov3_mobilenet_v1/configs/hyp.scratch.yaml
@@ -0,0 +1,42 @@
__BASE__: [
  './coco.yaml',
]

epochs: 270
per_batch_size: 8  # batch size per device; 8 devices * 8 = 64 in total
anchors: [[[116,90], [156,198], [373,326]], [[30,61], [62,45], [59,119]], [[10,13], [16,30], [33,23]]]

optimizer:
  optimizer: momentum
  lr_init: 0.001  # initial learning rate (SGD=1E-2, Adam=1E-3)
  gp_weight_decay: 0.0005  # optimizer weight decay 5e-4
  momentum: 0.9  # SGD momentum/Adam beta1
  nesterov: True  # update gradients with NAG (Nesterov Accelerated Gradient)
  loss_scale: 1.0  # loss scale for optimizer
  warmup_epochs: 0  # warmup epochs (fractions ok)
  warmup_momentum: 0.9  # warmup initial momentum
  warmup_bias_lr: 0.0  # warmup initial bias lr
  min_warmup_step: 4000  # minimum number of warmup steps
  group_param: yolov3  # parameter grouping strategy
  start_factor: 1.0
  end_factor: 0.01

loss:
  ignore_thresh: 0.7
  downsample: [32, 16, 8]
  label_smooth: false

data:
  num_parallel_workers: 4

  train_transforms:
    - { func_name: mixup, alpha: 1.5, beta: 1.5 }
    - { func_name: random_distort }
    - { func_name: random_expand, fill_value: [123.675, 116.28, 103.53] }
    - { func_name: random_crop }
    - { func_name: fliplr, prob: 0.5 }

  test_transforms:
    - { func_name: resize, target_size: [608, 608] }
    - { func_name: normalize_image, mean: [0.406, 0.456, 0.485], std: [0.225, 0.224, 0.229] }
    - { func_name: image_transpose, bgr2rgb: True, hwc2chw: True }
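
As a rough illustration of what the test_transforms chain amounts to at inference time, here is a minimal NumPy/OpenCV sketch. It is an approximation only: the actual transform implementations live in dataset.py and may differ in detail (for example, whether normalize_image also divides by 255), and the mean/std are assumed to be in BGR order since bgr2rgb is applied afterwards.

# sketch: approximate resize -> normalize_image -> image_transpose from the config above
import cv2
import numpy as np

def preprocess_for_eval(img_bgr: np.ndarray) -> np.ndarray:
    # resize: target_size [608, 608]
    img = cv2.resize(img_bgr, (608, 608))
    # normalize_image: scale to [0, 1] then standardize (mean/std assumed BGR-ordered here)
    img = img.astype(np.float32) / 255.0
    mean = np.array([0.406, 0.456, 0.485], dtype=np.float32)
    std = np.array([0.225, 0.224, 0.229], dtype=np.float32)
    img = (img - mean) / std
    # image_transpose: bgr2rgb, then hwc2chw
    img = img[:, :, ::-1]          # BGR -> RGB
    img = img.transpose(2, 0, 1)   # HWC -> CHW
    return img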
1,479 changes: 1,479 additions & 0 deletions examples/yolov3_mobilenet_v1/dataset.py

Large diffs are not rendered by default.

81 changes: 81 additions & 0 deletions examples/yolov3_mobilenet_v1/group_params.py
@@ -0,0 +1,81 @@
import numpy as np


__all__ = ["create_group_param"]


def create_group_param(params, gp_weight_decay=0.0, **kwargs):
    """
    Create group parameters for the optimizer.

    Args:
        params: Network parameters
        gp_weight_decay: Weight decay. Default: 0.0
        **kwargs: Other keyword arguments; if "group_param" is present, the yolov3 grouping strategy is used
    """
    if "group_param" in kwargs:
        return group_param_yolov3(params, weight_decay=gp_weight_decay, **kwargs)
    else:
        return params



def group_param_yolov3(
    params,
    weight_decay,
    start_factor,
    end_factor,
    lr_init,
    warmup_bias_lr,
    warmup_epochs,
    min_warmup_step,
    accumulate,
    epochs,
    steps_per_epoch,
    total_batch_size,
    **kwargs
):
    # parameter groups: pg0 = bias/beta, pg1 = weight, pg2 = others
    pg0, pg1, pg2 = _group_param_common3(params)

    lr_pg0, lr_pg1, lr_pg2 = [], [], []
    # piecewise-constant schedule: lr_init for 216 epochs, then 0.1x for 27 epochs,
    # then 0.01x for 27 epochs (hard-coded for the 270-epoch run in hyp.scratch.yaml)
    lrs = []
    lrs.extend([lr_init] * 216 * steps_per_epoch)
    lrs.extend([lr_init * 0.1] * 27 * steps_per_epoch)
    lrs.extend([lr_init * 0.01] * 27 * steps_per_epoch)

    warmup_steps = max(round(warmup_epochs * steps_per_epoch), min_warmup_step)
    xi = [0, warmup_steps]
    for i in range(epochs * steps_per_epoch):
        _lr = lrs[i]
        if i < warmup_steps:
            # linearly ramp each group from its warmup start value to the scheduled lr
            lr_pg0.append(np.interp(i, xi, [warmup_bias_lr, _lr]))
            lr_pg1.append(np.interp(i, xi, [0.0, _lr]))
            lr_pg2.append(np.interp(i, xi, [0.0, _lr]))
        else:
            lr_pg0.append(_lr)
            lr_pg1.append(_lr)
            lr_pg2.append(_lr)

    nbs = 64  # nominal batch size
    weight_decay *= total_batch_size * accumulate / nbs  # scale weight_decay to the effective batch size
    group_params = [
        {"params": pg0, "lr": lr_pg0},
        {"params": pg1, "lr": lr_pg1, "weight_decay": weight_decay},
        {"params": pg2, "lr": lr_pg2},
    ]
    return group_params


def _group_param_common3(params):
    pg0, pg1, pg2 = [], [], []  # optimizer parameter groups
    for p in params:
        if "bias" in p.name or "beta" in p.name:
            pg0.append(p)
        elif "weight" in p.name:
            pg1.append(p)
        else:
            pg2.append(p)

    return pg0, pg1, pg2  # bias/beta, weight, others
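
A rough usage sketch for create_group_param, assuming MindSpore's nn.Momentum optimizer and the hyper-parameters from configs/hyp.scratch.yaml; names like network and steps_per_epoch are placeholders, not part of this diff:

# sketch: build the grouped optimizer from the settings in hyp.scratch.yaml
import mindspore.nn as nn
from group_params import create_group_param

group_params = create_group_param(
    network.trainable_params(),       # `network` is the YOLOv3-MobileNetV1 model (placeholder)
    gp_weight_decay=0.0005,
    group_param="yolov3",             # triggers group_param_yolov3
    start_factor=1.0,
    end_factor=0.01,
    lr_init=0.001,
    warmup_bias_lr=0.0,
    warmup_epochs=0,
    min_warmup_step=4000,
    accumulate=1,
    epochs=270,
    steps_per_epoch=steps_per_epoch,  # e.g. dataset size // global batch size (placeholder)
    total_batch_size=64,
)
optimizer = nn.Momentum(group_params, learning_rate=0.001, momentum=0.9, use_nesterov=True, loss_scale=1.0)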