|
1 | 1 | { |
2 | | - "comment" : "nnetType :: (Mandatory) specify Type of Network (CNN,SDA) ", |
3 | | - "nnetType" : "SDA", |
4 | | - |
5 | | - "comment" : "train_data :: (Mandatory) specify the working directory containing data configuration and output ", |
6 | | - "wdir" : "wdir", |
7 | | - |
8 | | - "comment" : "valid_data (Mandatory) specify the path of the validation data relative to the working directory", |
9 | | - "data_spec" : "data_spec.json", |
10 | | - |
11 | | - "comment" : "sda_nnet_spec:: (Mandatory) specify the path of SDA network configuration specification relative to working directory", |
12 | | - "nnet_spec" : "sda_spec.json", |
13 | | - |
14 | | - "comment" : "output_file :: (Mandatory) specify the path of SDA network output file relative to working directory", |
15 | | - "output_file" : "sda_out.model", |
16 | | - |
17 | | - "comment" : "batch_size :: specify the mini batch size while training, default 128", |
18 | | - "batch_size" : 128, |
19 | | - |
20 | | - "comment": "", |
21 | | - "n_ins":2352, |
22 | | - |
23 | | - "comment":"", |
24 | | - "n_outs":200, |
25 | | - |
26 | | - "comment" :"pretraining_epochs: number of epoch to do pretraining", |
27 | | - "pretraining_epochs":10, |
28 | | - |
29 | | - |
30 | | - "comment" :"learning rate to be used during pre-training", |
31 | | - "pretrain_lr":0.08, |
32 | | - |
33 | | - "comment" : "finetune_method:: Two methods are supported C: Constant learning rate and E : Exponential decay", |
34 | | - "finetune_method":"C", |
35 | | - |
36 | | - "comment" : "finetune_rate :: learning rate configuration", |
37 | | - "finetune_rate" : { |
38 | | - "learning_rate" : 0.08, |
39 | | - "epoch_num" : 10, |
40 | | - |
41 | | - "start_rate" : 0.08, |
42 | | - "scale_by" : 0.5, |
43 | | - "min_derror_decay_start" : 0.05, |
44 | | - "min_derror_stop" : 0.05, |
45 | | - "min_epoch_decay_start" : 15, |
46 | | - "init_error" :100 |
47 | | - }, |
48 | | - |
49 | | - "comment" : "finetune_momentum :: Specify the momentum factor while finetuning", |
50 | | - "finetune_momentum": 0.5, |
51 | | - |
52 | | - "processes":{ |
53 | | - "pretraining":true, |
54 | | - "finetuning":true, |
55 | | - "testing":true, |
56 | | - "export_data":false |
57 | | - } |
| 2 | + "comment" : "nnetType :: (Mandatory) specify Type of Network (CNN,SDA) ", |
| 3 | + "nnetType" : "SDA", |
| 4 | + |
| 5 | + "comment" : "wdir :: (Mandatory) specify the working directory containing data configuration and output ", |
| 6 | + "wdir" : "wdir", |
| 7 | + |
| 8 | + "comment" : "data_spec :: (Mandatory) specify the path of the data specification file relative to the working directory", |
| 9 | + "data_spec" : "data_spec.json", |
| 10 | + |
| 11 | + "comment" : "sda_nnet_spec:: (Mandatory) specify the path of SDA network configuration specification relative to working directory", |
| 12 | + "nnet_spec" : "sda_spec.json", |
| 13 | + |
| 14 | + "comment" : "output_file :: (Mandatory) specify the path of SDA network output file relative to working directory", |
| 15 | + "output_file" : "sda_out.model", |
| 16 | + |
| 17 | + "logger_level":"INFO", |
| 18 | + |
| 19 | + "comment" : "batch_size :: specify the mini batch size while training, default 128", |
| 20 | + "batch_size" : 128, |
| 21 | + |
| 22 | + "comment": "n_ins :: number of input units of the network", |
| 23 | + "n_ins":784, |
| 24 | + |
| 25 | + "comment":"n_outs :: number of output units (classes) of the network", |
| 26 | + "n_outs":10, |
| 27 | + |
| 28 | + "comment" :"pretraining_epochs :: number of epochs to do pretraining", |
| 29 | + "pretraining_epochs":10, |
| 30 | + |
| 31 | + "comment" :"learning rate to be used during pre-training", |
| 32 | + "pretrain_lr":0.08, |
| 33 | + |
| 34 | + "comment" : "finetune_method:: Two methods are supported C: Constant learning rate and E : Exponential decay", |
| 35 | + "finetune_method":"C", |
| 36 | + |
| 37 | + "comment" : "finetune_rate :: learning rate configuration", |
| 38 | + "finetune_rate" : { |
| 39 | + "learning_rate" : 0.08, |
| 40 | + "epoch_num" : 10, |
| 41 | + |
| 42 | + "start_rate" : 0.08, |
| 43 | + "scale_by" : 0.5, |
| 44 | + "min_derror_decay_start" : 0.05, |
| 45 | + "min_derror_stop" : 0.05, |
| 46 | + "min_epoch_decay_start" : 15, |
| 47 | + "init_error" :100 |
| 48 | + }, |
| 49 | + |
| 50 | + "comment" : "finetune_momentum :: Specify the momentum factor while finetuning", |
| 51 | + "finetune_momentum": 0.5, |
| 52 | + |
| 53 | + "processes":{ |
| 54 | + "pretraining":true, |
| 55 | + "finetuning":true, |
| 56 | + "testing":true, |
| 57 | + "export_data":false |
| 58 | + } |
58 | 59 | } |
0 commit comments