[DNNMark]
run_mode=composed
[Convolution]
name=block1_1_conv
n=100
c=3
h=32
w=32
previous_layer=null
conv_mode=convolution
kernel_size=3
num_output=64
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block1_1_conv
name=block1_1_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block1_1_bn
name=block1_1_relu
activation_mode=relu
[Dropout]
previous_layer=block1_1_relu
name=dropout1_1
dropout_probability=0.3
random_seed=0
[Convolution]
previous_layer=dropout1_1
name=block1_2_conv
conv_mode=convolution
kernel_size=3
num_output=64
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block1_2_conv
name=block1_2_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block1_2_bn
name=block1_2_relu
activation_mode=relu
[Pooling]
previous_layer=block1_2_relu
name=pool1_2
pool_mode=max
kernel_size=2
pad=0
stride=2
[Convolution]
previous_layer=pool1_2
name=block2_1_conv
conv_mode=convolution
kernel_size=3
num_output=128
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block2_1_conv
name=block2_1_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block2_1_bn
name=block2_1_relu
activation_mode=relu
[Dropout]
previous_layer=block2_1_relu
name=dropout2_1
dropout_probability=0.4
random_seed=0
[Convolution]
previous_layer=dropout2_1
name=block2_2_conv
conv_mode=convolution
kernel_size=3
num_output=128
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block2_2_conv
name=block2_2_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block2_2_bn
name=block2_2_relu
activation_mode=relu
[Pooling]
previous_layer=block2_2_relu
name=pool2_2
pool_mode=max
kernel_size=2
pad=0
stride=2
[Convolution]
previous_layer=pool2_2
name=block3_1_conv
conv_mode=convolution
kernel_size=3
num_output=256
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block3_1_conv
name=block3_1_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block3_1_bn
name=block3_1_relu
activation_mode=relu
[Dropout]
previous_layer=block3_1_relu
name=dropout3_1
dropout_probability=0.4
random_seed=0
[Convolution]
previous_layer=dropout3_1
name=block3_2_conv
conv_mode=convolution
kernel_size=3
num_output=256
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block3_2_conv
name=block3_2_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block3_2_bn
name=block3_2_relu
activation_mode=relu
[Dropout]
previous_layer=block3_2_relu
name=dropout3_2
dropout_probability=0.4
random_seed=0
[Convolution]
previous_layer=dropout3_2
name=block3_3_conv
conv_mode=convolution
kernel_size=3
num_output=256
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block3_3_conv
name=block3_3_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block3_3_bn
name=block3_3_relu
activation_mode=relu
[Pooling]
previous_layer=block3_3_relu
name=pool3_3
pool_mode=max
kernel_size=2
pad=0
stride=2
[Convolution]
previous_layer=pool3_3
name=block4_1_conv
conv_mode=convolution
kernel_size=3
num_output=512
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block4_1_conv
name=block4_1_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block4_1_bn
name=block4_1_relu
activation_mode=relu
[Dropout]
previous_layer=block4_1_relu
name=dropout4_1
dropout_probability=0.4
random_seed=0
[Convolution]
previous_layer=dropout4_1
name=block4_2_conv
conv_mode=convolution
kernel_size=3
num_output=512
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block4_2_conv
name=block4_2_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block4_2_bn
name=block4_2_relu
activation_mode=relu
[Dropout]
previous_layer=block4_2_relu
name=dropout4_2
dropout_probability=0.4
random_seed=0
[Convolution]
previous_layer=dropout4_2
name=block4_3_conv
conv_mode=convolution
kernel_size=3
num_output=512
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block4_3_conv
name=block4_3_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block4_3_bn
name=block4_3_relu
activation_mode=relu
[Pooling]
previous_layer=block4_3_relu
name=pool4_3
pool_mode=max
kernel_size=2
pad=0
stride=2
[Convolution]
previous_layer=pool4_3
name=block5_1_conv
conv_mode=convolution
kernel_size=3
num_output=512
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block5_1_conv
name=block5_1_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block5_1_bn
name=block5_1_relu
activation_mode=relu
[Dropout]
previous_layer=block5_1_relu
name=dropout5_1
dropout_probability=0.4
random_seed=0
[Convolution]
previous_layer=dropout5_1
name=block5_2_conv
conv_mode=convolution
kernel_size=3
num_output=512
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block5_2_conv
name=block5_2_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block5_2_bn
name=block5_2_relu
activation_mode=relu
[Dropout]
previous_layer=block5_2_relu
name=dropout5_2
dropout_probability=0.4
random_seed=0
[Convolution]
previous_layer=dropout5_2
name=block5_3_conv
conv_mode=convolution
kernel_size=3
num_output=512
pad=1
stride=1
conv_fwd_pref=fastest
conv_bwd_filter_pref=fastest
conv_bwd_data_pref=fastest
[BatchNorm]
previous_layer=block5_3_conv
name=block5_3_bn
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=block5_3_bn
name=block5_3_relu
activation_mode=relu
[Pooling]
previous_layer=block5_3_relu
name=pool5_3
pool_mode=max
kernel_size=2
pad=0
stride=2
[Dropout]
previous_layer=pool5_3
name=dropout5_3
dropout_probability=0.5
random_seed=0
[FullyConnected]
previous_layer=dropout5_3
name=fc1
num_output=512
[BatchNorm]
previous_layer=fc1
name=bn_fc1
batchnorm_mode=per_activation
save_intermediates=true
exp_avg_factor=1
epsilon=2e-5
[Activation]
previous_layer=bn_fc1
name=relu_fc1
activation_mode=relu
[Dropout]
previous_layer=relu_fc1
name=dropout_fc1
dropout_probability=0.5
random_seed=0
[FullyConnected]
previous_layer=dropout_fc1
name=fc2
num_output=100