
Commit

adding yolo9000
pjreddie committed Jan 4, 2017
1 parent 2710d63 commit d2dece3
Showing 29 changed files with 19,587 additions and 592 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -3,6 +3,8 @@
 *.csv
 *.out
 *.png
+*.jpg
+old/
 mnist/
 data/
 caffe/
4 changes: 2 additions & 2 deletions cfg/coco.data
@@ -1,7 +1,7 @@
 classes= 80
 train = /home/pjreddie/data/coco/trainvalno5k.txt
-valid = coco_testdev
-#valid = data/coco_val_5k.list
+#valid = coco_testdev
+valid = data/coco_val_5k.list
 names = data/coco.names
 backup = /home/pjreddie/backup/
 eval=coco
2 changes: 1 addition & 1 deletion cfg/voc.data
@@ -2,5 +2,5 @@ classes= 20
 train = /home/pjreddie/data/voc/train.txt
 valid = /home/pjreddie/data/voc/2007_test.txt
 names = data/voc.names
-backup = /home/pjreddie/backup/
+backup = backup

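For context, both cfg/coco.data and cfg/voc.data use darknet's plain key = value format: classes is the number of categories, train and valid point to text files listing image paths, names is the label-name file, backup is the directory where weight snapshots are written, and lines starting with # are ignored (which is how the valid entry is toggled in the coco.data change above). The parser below is a minimal standalone sketch of that format for illustration only; it is not darknet's own reader (darknet uses its read_data_cfg/option_find_str helpers for this).

/* Minimal sketch (not darknet's code) of reading a .data file's
 * key = value pairs, e.g. "valid = data/coco_val_5k.list". */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Strip leading/trailing whitespace in place; return the trimmed start. */
static char *trim(char *s)
{
    while (*s == ' ' || *s == '\t') ++s;
    char *end = s + strlen(s);
    while (end > s && (end[-1] == ' ' || end[-1] == '\t' ||
                       end[-1] == '\n' || end[-1] == '\r')) --end;
    *end = '\0';
    return s;
}

int main(int argc, char **argv)
{
    const char *path = argc > 1 ? argv[1] : "cfg/coco.data";
    FILE *fp = fopen(path, "r");
    if (!fp) { perror(path); return 1; }

    char line[1024];
    while (fgets(line, sizeof line, fp)) {
        char *s = trim(line);
        if (*s == '\0' || *s == '#') continue;   /* skip blanks and comments */
        char *eq = strchr(s, '=');
        if (!eq) continue;                        /* not a key = value line */
        *eq = '\0';
        printf("key '%s' -> value '%s'\n", trim(s), trim(eq + 1));
    }
    fclose(fp);
    return 0;
}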
211 changes: 211 additions & 0 deletions cfg/yolo9000.cfg
@@ -0,0 +1,211 @@
[net]
batch=1
subdivisions=1
height=416
width=416
channels=3
momentum=0.9
decay=0.0005

learning_rate=0.00001
max_batches = 242200
policy=steps
steps=500,200000,240000
scales=10,.1,.1

hue=.1
saturation=.75
exposure=.75

[convolutional]
batch_normalize=1
filters=32
size=3
stride=1
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[convolutional]
batch_normalize=1
filters=64
size=3
stride=1
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=64
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[convolutional]
batch_normalize=1
filters=1024
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=1024
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=1024
size=3
stride=1
pad=1
activation=leaky

[convolutional]
filters=28269
size=1
stride=1
pad=1
activation=linear

[region]
anchors = 0.77871, 1.14074, 3.00525, 4.31277, 9.22725, 9.61974
bias_match=1
classes=9418
coords=4
num=3
softmax=1
jitter=.2
rescore=1

object_scale=5
noobject_scale=1
class_scale=1
coord_scale=1

thresh = .6
absolute=1
random=1

tree=data/9k.tree
map = data/coco9k.map
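One sizing detail worth noting: the last convolutional layer's filters=28269 is fixed by the [region] settings. Each of the num=3 anchor boxes per cell predicts coords=4 box coordinates, 1 objectness score, and 9418 class scores, so the layer needs 3 * (4 + 1 + 9418) = 28269 output filters. The tree and map entries point at the WordTree files YOLO9000 uses for hierarchical classification: data/9k.tree defines the 9418-class hierarchy and data/coco9k.map maps COCO's 80 classes onto nodes of that tree. The snippet below is only an arithmetic sanity check of the filter count, written as a standalone sketch rather than darknet code.

/* Sanity-check the region layer's output size for yolo9000.cfg.
 * filters = num * (coords + 1 + classes); the "+1" is the objectness score.
 * Standalone sketch for illustration -- not part of darknet. */
#include <stdio.h>

static int region_filters(int num, int coords, int classes)
{
    return num * (coords + 1 + classes);
}

int main(void)
{
    int num = 3, coords = 4, classes = 9418;     /* values from [region] above */
    printf("expected filters = %d\n", region_filters(num, coords, classes));
    /* prints: expected filters = 28269, matching the final [convolutional] layer */
    return 0;
}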