# This file was created by the command:
# steps/nnet3/xconfig_to_configs.py --xconfig-file exp/chain/tdnn_1d_sp/configs/network.xconfig --config-dir exp/chain/tdnn_1d_sp/configs/
# It is a copy of the source from which the config files in this
# directory were generated.

input dim=100 name=ivector
input dim=43 name=input

# please note that it is important to have input layer with the name=input
# as the layer immediately preceding the fixed-affine-layer to enable
# the use of short notation for the descriptor
fixed-affine-layer name=lda input=Append(-1,0,1,ReplaceIndex(ivector, t, 0)) affine-transform-file=exp/chain/tdnn_1d_sp/configs/lda.mat
# the first splicing is moved before the lda layer, so no splicing here
relu-batchnorm-dropout-layer name=tdnn1 l2-regularize=0.01 dropout-proportion=0.0 dropout-per-dim=true dropout-per-dim-continuous=true dim=1536
tdnnf-layer name=tdnnf2 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=1
tdnnf-layer name=tdnnf3 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=1
tdnnf-layer name=tdnnf4 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=1
tdnnf-layer name=tdnnf5 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=0
tdnnf-layer name=tdnnf6 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
tdnnf-layer name=tdnnf7 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
tdnnf-layer name=tdnnf8 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
tdnnf-layer name=tdnnf9 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
tdnnf-layer name=tdnnf10 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
tdnnf-layer name=tdnnf11 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
tdnnf-layer name=tdnnf12 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
tdnnf-layer name=tdnnf13 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
tdnnf-layer name=tdnnf14 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
tdnnf-layer name=tdnnf15 l2-regularize=0.01 dropout-proportion=0.0 bypass-scale=0.66 dim=1536 bottleneck-dim=160 time-stride=3
linear-component name=prefinal-l dim=256 l2-regularize=0.01 orthonormal-constraint=-1.0
prefinal-layer name=prefinal-chain input=prefinal-l l2-regularize=0.01 big-dim=1536 small-dim=256
output-layer name=output include-log-softmax=false dim=5689 l2-regularize=0.002
prefinal-layer name=prefinal-xent input=prefinal-l l2-regularize=0.01 big-dim=1536 small-dim=256
output-layer name=output-xent dim=5689 learning-rate-factor=5.0 l2-regularize=0.002