Skip to content

Commit f911391

Browse files
committed
Conflicts: run/run_CNN.py run/run_DBN.py run/run_SDA.py
2 parents 2392e3a + 1894819 commit f911391

10 files changed

Lines changed: 252 additions & 264 deletions

File tree

config/CNN/NP/model_conf.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@
4949
"pretraining":false,
5050
"finetuning":true,
5151
"testing":true,
52-
"export_data":false
52+
"export_data":true
5353
},
5454
"export_path" : "data/export"
5555
}

config/DBN/data_spec.json

Lines changed: 27 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,31 +1,31 @@
11
{
2-
"validation": {
3-
"base_path" : "data/NPFILE",
4-
"filename" : "val.dat",
5-
"partition" : 200,
6-
"random" : true,
7-
"random_seed" : 123,
8-
"keep_flatten" : true,
9-
"reader_type" : "NP"
10-
},
2+
"validation": {
3+
"base_path" : "data/NPFILE",
4+
"filename" : "val.dat",
5+
"partition" : 200,
6+
"random" : true,
7+
"random_seed" : 123,
8+
"keep_flatten" : true,
9+
"reader_type" : "NP"
10+
},
1111

12-
"training" : {
13-
"base_path" : "data/NPFILE",
14-
"filename" : "train.dat",
15-
"partition" : 200,
16-
"random" : true,
17-
"random_seed" : 123,
18-
"keep_flatten" : true,
19-
"reader_type" : "NP"
20-
},
12+
"training" : {
13+
"base_path" : "data/NPFILE",
14+
"filename" : "train.dat",
15+
"partition" : 200,
16+
"random" : true,
17+
"random_seed" : 123,
18+
"keep_flatten" : true,
19+
"reader_type" : "NP"
20+
},
2121

22-
"testing" : {
23-
"base_path" : "data/NPFILE",
24-
"filename" : "train.dat",
25-
"partition" : 200,
26-
"random" : true,
27-
"random_seed" : 123,
28-
"keep_flatten" : true,
29-
"reader_type" : "NP"
30-
}
22+
"testing" : {
23+
"base_path" : "data/NPFILE",
24+
"filename" : "train.dat",
25+
"partition" : 200,
26+
"random" : true,
27+
"random_seed" : 123,
28+
"keep_flatten" : true,
29+
"reader_type" : "NP"
30+
}
3131
}

config/DBN/model_conf.json

Lines changed: 59 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -1,56 +1,62 @@
11
{
2-
"comment" : "nnetType :: (Mandatory) specify Type of Network (CNN,RBM) ",
3-
"nnetType" : "RBM",
4-
5-
"comment" : "train_data :: (Mandatory) specify the working directory containing data configuration and output ",
6-
"wdir" : "wdir",
7-
8-
"comment" : "valid_data (Mandatory) specify the path of the validation data relative to the working directory",
9-
"data_spec" : "data_spec.json",
10-
11-
"comment" : "rbm_nnet_spec:: (Mandatory) specify the path of RBM network configuration specification relative to working directory",
12-
"nnet_spec" : "rbm_spec.json",
13-
14-
"comment" : "output_file :: (Mandatory) specify the path of RBM network output file relative to working directory",
15-
"output_file" : "rbm_out.model",
16-
17-
"comment" : "batch_size :: specify the mini batch size while training, default 128",
18-
"batch_size" : 128,
19-
20-
"comment" :"TODO",
21-
"gbrbm_learning_rate":0.005,
22-
"learning_rate":0.08,
23-
"pretraining_epochs":10,
24-
25-
"comment" : "initial_momentum,final_momentum,initial_momentum_epoch :: Specify the momentum factor while training default 0.5,0.9,5",
26-
"initial_momentum":0.5,
27-
"final_momentum":0.9,
28-
"initial_momentum_epoch":5,
29-
30-
"comment" : "finetune_method:: Two methods are supported C: Constant learning rate and E : Exponential decay",
31-
"finetune_method":"C",
32-
33-
"comment" : "finetune_rate :: learning rate configuration",
34-
"finetune_rate" : {
35-
"learning_rate" : 0.08,
36-
"epoch_num" : 10,
37-
38-
"start_rate" : 0.08,
39-
"scale_by" : 0.5,
40-
"min_derror_decay_start" : 0.05,
41-
"min_derror_stop" : 0.05,
42-
"min_epoch_decay_start" : 15,
43-
"init_error" :100
44-
},
45-
46-
"comment" : "finetune_momentum :: Specify the momentum factor while finetuning",
47-
"finetune_momentum": 0.5,
48-
49-
"processes":{
50-
"pretraining":false,
51-
"finetuning":true,
52-
"testing":true,
53-
"export_data":false
54-
}
2+
"comment" : "nnetType :: (Mandatory) specify Type of Network (CNN,RBM) ",
3+
"nnetType" : "RBM",
4+
5+
"comment" : "train_data :: (Mandatory) specify the working directory containing data configuration and output ",
6+
"wdir" : "wdir",
7+
8+
"comment" : "valid_data (Mandatory) specify the path of the validation data relative to the working directory",
9+
"data_spec" : "data_spec.json",
10+
11+
"comment" : "rbm_nnet_spec:: (Mandatory) specify the path of RBM network configuration specification relative to working directory",
12+
"nnet_spec" : "rbm_spec.json",
13+
14+
"comment" : "output_file :: (Mandatory) specify the path of RBM network output file relative to working directory",
15+
"output_file" : "rbm_out.model",
16+
17+
"comment" : "batch_size :: specify the mini batch size while training, default 128",
18+
"batch_size" : 128,
19+
20+
"comment": "",
21+
"n_ins":2352,
22+
23+
"comment":"",
24+
"n_outs":200,
25+
26+
"comment" :"TODO",
27+
"gbrbm_learning_rate":0.005,
28+
"learning_rate":0.08,
29+
"pretraining_epochs":10,
30+
31+
"comment" : "initial_momentum,final_momentum,initial_momentum_epoch :: Specify the momentum factor while training default 0.5,0.9,5",
32+
"initial_momentum":0.5,
33+
"final_momentum":0.9,
34+
"initial_momentum_epoch":5,
35+
36+
"comment" : "finetune_method:: Two methods are supported C: Constant learning rate and E : Exponential decay",
37+
"finetune_method":"C",
38+
39+
"comment" : "finetune_rate :: learning rate configuration",
40+
"finetune_rate" : {
41+
"learning_rate" : 0.08,
42+
"epoch_num" : 10,
43+
44+
"start_rate" : 0.08,
45+
"scale_by" : 0.5,
46+
"min_derror_decay_start" : 0.05,
47+
"min_derror_stop" : 0.05,
48+
"min_epoch_decay_start" : 15,
49+
"init_error" :100
50+
},
51+
52+
"comment" : "finetune_momentum :: Specify the momentum factor while finetuning",
53+
"finetune_momentum": 0.5,
54+
55+
"processes":{
56+
"pretraining":false,
57+
"finetuning":false,
58+
"testing":true,
59+
"export_data":false
60+
}
5561

5662
}

config/DBN/rbm_spec.json

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,9 @@
1+
12
{
23

34
"comment" : "layers :: RBM layer configuration (No: of Nodes)",
45
"hidden_layers": [ 2350,1024,1024,1024,1024,1901 ],
56

6-
"comment": "",
7-
"n_ins":2352,
8-
9-
"comment":"",
10-
"n_outs":200,
11-
127
"comment" : "activation :: sigmoid or tanh",
138
"activation" : "sigmoid",
149

config/SDA/model_conf.json

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,12 @@
1717
"comment" : "batch_size :: specify the mini batch size while training, default 128",
1818
"batch_size" : 128,
1919

20+
"comment": "",
21+
"n_ins":2352,
22+
23+
"comment":"",
24+
"n_outs":200,
25+
2026
"comment" :"pretraining_epochs: number of epoch to do pretraining",
2127
"pretraining_epochs":10,
2228

config/SDA/sda_spec.json

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -6,17 +6,10 @@
66
"comment":"",
77
"corruption_levels": [0.1,0.2,0.2,0.2,0.2,0.3],
88

9-
"comment": "",
10-
"n_ins":2352,
11-
12-
"comment":"",
13-
"n_outs":200,
149

1510
"comment" : "activation :: sigmoid or tanh",
1611
"activation" : "tanh",
1712

18-
19-
2013
"comment" : "random_seed::",
2114
"random_seed" : 89677
2215
}

run/run_CNN.py

Lines changed: 34 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
import theano.tensor as T
2424
from theano.tensor.shared_randomstreams import RandomStreams
2525

26-
from utils.load_conf import load_model,load_conv_spec,load_mlp_spec,load_data_spec
26+
from utils.load_conf import load_model,load_conv_spec,load_data_spec
2727
from io_modules.file_reader import read_dataset
2828
from utils.learn_rates import LearningRate
2929
from utils.utils import parse_activation
@@ -43,8 +43,10 @@ def runCNN(arg):
4343
else :
4444
model_config = load_model(arg,'CNN')
4545

46-
conv_config,conv_layer_config,mlp_config = load_conv_spec(model_config['nnet_spec'],model_config['batch_size'],
47-
model_config['input_shape'])
46+
conv_config,conv_layer_config,mlp_config = load_conv_spec(
47+
model_config['nnet_spec'],
48+
model_config['batch_size'],
49+
model_config['input_shape'])
4850

4951
data_spec = load_data_spec(model_config['data_spec'],model_config['batch_size']);
5052

@@ -59,43 +61,45 @@ def runCNN(arg):
5961
createDir(model_config['wdir']);
6062
#create working dir
6163

62-
#learning rate, batch-size and momentum
63-
lrate = LearningRate.get_instance(model_config['l_rate_method'],model_config['l_rate']);
6464
batch_size = model_config['batch_size'];
65-
momentum = model_config['momentum']
66-
6765
cnn = CNN(numpy_rng,theano_rng,conv_layer_configs = conv_layer_config, batch_size = batch_size,
68-
n_outs=model_config['n_outs'],hidden_layers_sizes=mlp_config['layers'], conv_activation = conv_activation,
69-
hidden_activation = hidden_activation,use_fast = conv_config['use_fast'])
66+
n_outs=model_config['n_outs'],hidden_layers_sizes=mlp_config['layers'],
67+
conv_activation = conv_activation,hidden_activation = hidden_activation,
68+
use_fast = conv_config['use_fast'])
7069

71-
train_sets, train_xy, train_x, train_y = read_dataset(data_spec['training'])
72-
valid_sets, valid_xy, valid_x, valid_y = read_dataset(data_spec['validation'])
70+
if model_config['processes']['finetuning']:
71+
#learning rate, batch-size and momentum
72+
lrate = LearningRate.get_instance(model_config['l_rate_method'],model_config['l_rate']);
73+
momentum = model_config['momentum']
7374

74-
err=fineTunning(cnn,train_sets,train_xy,train_x,train_y,
75-
valid_sets,valid_xy,valid_x,valid_y,lrate,momentum,batch_size);
76-
77-
_cnn2file(cnn.layers[0:cnn.conv_layer_num],cnn.layers[cnn.conv_layer_num:], filename=model_config['output_file']);
75+
train_sets, train_xy, train_x, train_y = read_dataset(data_spec['training'])
76+
valid_sets, valid_xy, valid_x, valid_y = read_dataset(data_spec['validation'])
77+
78+
err=fineTunning(cnn,train_sets,train_xy,train_x,train_y,
79+
valid_sets,valid_xy,valid_x,valid_y,lrate,momentum,batch_size);
7880

7981
####################
8082
## TESTING ##
8183
####################
82-
try:
83-
test_sets, test_xy, test_x, test_y = read_dataset(data_spec['testing'])
84-
except KeyError:
85-
#raise e
86-
logger.info("No testing set:Skiping Testing");
87-
logger.info("Finshed")
88-
sys.exit(0)
89-
90-
pred,err=testing(cnn,test_sets, test_xy, test_x, test_y,batch_size)
91-
92-
####################
84+
if model_config['processes']['testing']:
85+
try:
86+
test_sets, test_xy, test_x, test_y = read_dataset(data_spec['testing'])
87+
except KeyError:
88+
#raise e
89+
logger.info("No testing set:Skiping Testing");
90+
logger.info("Finshed")
91+
sys.exit(0)
92+
93+
pred,err=testing(cnn,test_sets, test_xy, test_x, test_y,batch_size)
94+
95+
##########################
9396
## Export Features ##
94-
####################
95-
mlp_layers = cnn.layers[cnn.conv_layer_num:]
96-
_file2cnn(cnn.conv_layers,mlp_layers, filename=model_config['output_file'])
97+
##########################
98+
if model_config['processes']['export_data']:
99+
mlp_layers = cnn.layers[cnn.conv_layer_num:]
100+
_file2cnn(cnn.conv_layers,mlp_layers, filename=model_config['output_file'])
101+
exportFeatures(cnn,model_config['export_path'],data_spec['testing'])
97102

98-
exportFeatures(cnn,model_config['export_path'],data_spec['testing'])
99103

100104
if __name__ == '__main__':
101105
runCNN(sys.argv[1])

0 commit comments

Comments (0)