# AlexNet/layers/layer-params-141-2009.cfg
# cuda-convnet layer-parameter file: per-layer learning rates (epsW/epsB),
# momentum (momW/momB), and weight decay (wc/wball). Comma-separated values
# correspond to a layer's multiple weight inputs.
[conv1a]
epsW=0.0000
epsB=0.00
momW=0.9
momB=0.9
wc=0.0005
wball=0.00
[conv1b]
epsW=0.0000
epsB=0.00
momW=0.9
momB=0.9
wc=0.0005
wball=0.00
[conv2a]
epsW=0.0000,0.0000
epsB=0.00
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0.00,0.00
[conv2b]
epsW=0.0000,0.0000
epsB=0.00
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0.00,0.00
[conv3a]
epsW=0.00001,0.00001
epsB=0.002
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0,0
[conv3b]
epsW=0.00001,0.00001
epsB=0.002
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0,0
[conv4a]
epsW=0.00001
epsB=0.002
momW=0.9
momB=0.9
wc=0.0005
wball=0
[conv4b]
epsW=0.00001
epsB=0.002
momW=0.9
momB=0.9
wc=0.0005
wball=0
[conv5a]
epsW=0.00001
epsB=0.002
momW=0.9
momB=0.9
wc=0.0005
wball=0
[conv5b]
epsW=0.00001
epsB=0.002
momW=0.9
momB=0.9
wc=0.0005
wball=0
[conv6a]
epsW=0.00001,0.00001
epsB=0.002
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0,0
[conv6b]
epsW=0.00001,0.00001
epsB=0.002
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0,0
[fc2048a]
epsW=0.00001,0.00001
epsB=0.002
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0,0
[fc2048b]
epsW=0.00001,0.00001
epsB=0.002
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0,0
[fc2048ba]
epsW=0.00001,0.00001
epsB=0.002
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0,0
[fc2048bb]
epsW=0.00001,0.00001
epsB=0.002
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0,0
[fc1000]
epsW=0.00001,0.00001
epsB=0.002
momW=0.9,0.9
momB=0.9
wc=0.0005,0.0005
wball=0,0
[logprob]
coeff=1
topk=5
[hs1a]
enable=true
[hs2a]
enable=true
[hs1b]
enable=true
[hs2b]
enable=true
[rnorm1a]
scale=0.0001
pow=0.75
minDiv=2
[rnorm1b]
scale=0.0001
pow=0.75
minDiv=2
[rnorm2a]
scale=0.0001
pow=0.75
minDiv=2
[rnorm2b]
scale=0.0001
pow=0.75
minDiv=2
[cnorm2a]
scale=0.001
pow=0.75
[cnorm2b]
scale=0.001
pow=0.75
# training on lsvrc-2010
# initialized from 141 trained on lsvrc-2012, then 2009
# using def file layers-141-2009-2010.cfg
# /nobackup/kriz/tmp/ConvNet__2012-09-12_01.06.32
# on guppy8
# init epsw 0.001
# logs/layers-141-2010-pretrain-2009-pretrain-2012.log
# epoch 14: set epsw to 0.0001 from 0.001
# epoch 30: set epsw to 0.00001 from 0.0001 on conv1,conv2
# set color noise to 0 from 0.1
# epoch 36: set epsw to 0 on conv1/2
# epoch 47: set epsw to 0.00001 from 0.0001
# epoch 54: killed
# logprob: 1.511725, 0.356707, 0.154893
# training on lsvrc-2012
# initialized from 141 trained on lsvrc-2012, then 2009
# using def file layers-141-2009-2012.cfg
# init epsw 0.001
# logs/layers-141-2012-pretrain-2009-pretrain-2012.log
# /nobackup/kriz/tmp/ConvNet__2012-09-09_03.36.13
# backup: /ais/gobi3/u/kriz/tmp/ConvNet__2012-09-09_03.36.13
# also /ais/gobi3/u/kriz/net-backups/
# on guppy8
# epoch 13: set epsw to 0.0001 from 0.001
# epoch 26: set epsw to 0.00001 from 0.0001 on conv1,conv2
# set color noise to 0 from 0.1
# epoch 32: set epsw to 0 on conv1/2
# epoch 43: set epsw to 0.00001 from 0.0001
# epoch 54: killed
# python convnet.py -f /nobackup/kriz/tmp/ConvNet__2012-09-09_03.36.13 --test-only=1 --test-one=0 --multiview-test=1
# logprob: 1.671316, 0.395620, 0.172060
#python convnet.py -f /nobackup/kriz/tmp/ConvNet__2012-09-09_03.36.13 --test-only=1 --test-one=0 --multiview-test=0
# logprob: 1.779082, 0.415920, 0.186780
# 141-2009 notes, before going back to 2012:
# initialized from 141 trained on lsvrc-2012
# init epsw 0.001
# logs/layers-141-2009-pretrain-2012.log
# /nobackup/kriz/tmp/ConvNet__2012-09-07_05.22.51
# epoch 4.1189: set epsw to 0.0001 from 0.001
# epoch 5.1596: killed, not improving much. let's go back to training on lsvrc-2012 with these weights now.
#
# 141 notes:
# this is like #137 but with conv6, also communication in conv6
# /nobackup/kriz/tmp/ConvNet__2012-09-03_16.27.48
# logs/layers-141.log
# epoch 23: set epsw to 0.001 from 0.01
# epoch 48: set epsw to 0.0001 from 0.001
# epoch 60: this seems overfitty....killing
# but will use these weights to initialize a net on 2009... why the hell not?