Skip to content

Commit 651de40

Browse files
committed
Add: Activation in each layer cnn/cnn3d
1 parent ec681bb commit 651de40

6 files changed

Lines changed: 31 additions & 26 deletions

File tree

src/layers/mlp.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -33,26 +33,26 @@ def __init__(self, rng, input, n_in, n_out, W=None, b=None, activation=T.tanh,
3333
self.delta_W = theano.shared(value = numpy.zeros((n_in,n_out),dtype=theano.config.floatX), name='delta_W')
3434
self.delta_b = theano.shared(value = numpy.zeros_like(self.b.get_value(borrow=True), dtype=theano.config.floatX), name='delta_b')
3535

36-
lin_output = T.dot(input, self.W) + self.b
36+
self.lin_output = T.dot(input, self.W) + self.b
3737

3838
if adv_activation_method == 'maxout': # pooling of output of neuron based on poolsize
3939
self.last_start = n_out - pool_size
40-
self.tmp_output = lin_output[:,0:self.last_start+1:pool_size]
40+
self.tmp_output = self.lin_output[:,0:self.last_start+1:pool_size]
4141
for i in range(1, pool_size):
42-
cur = lin_output[:,i:self.last_start+i+1:pool_size]
42+
cur = self.lin_output[:,i:self.last_start+i+1:pool_size]
4343
self.tmp_output = T.maximum(cur, self.tmp_output)
4444
self.output = activation(self.tmp_output)
4545
elif adv_activation_method == 'pnorm': # pooling of output of neuron based on poolsize and normalizing the output
4646
self.last_start = n_out - pool_size
47-
self.tmp_output = abs(lin_output[:,0:self.last_start+1:pool_size]) ** pnorm_order
47+
self.tmp_output = abs(self.lin_output[:,0:self.last_start+1:pool_size]) ** pnorm_order
4848
for i in range(1, pool_size):
49-
cur = abs(lin_output[:,i:self.last_start+i+1:pool_size]) ** pnorm_order
49+
cur = abs(self.lin_output[:,i:self.last_start+i+1:pool_size]) ** pnorm_order
5050
self.tmp_output = self.tmp_output + cur
5151
self.tmp_output = self.tmp_output ** (1.0 / pnorm_order)
5252
self.output = activation(self.tmp_output)
5353
else:
54-
self.output = (lin_output if activation is None
55-
else activation(lin_output))
54+
self.output = (self.lin_output if activation is None
55+
else activation(self.lin_output))
5656

5757
# parameters of the model
5858
self.params = [self.W, self.b]

src/models/cnn.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
from collections import OrderedDict
1111
from io_modules.file_reader import read_dataset
1212
from utils.plotter import plot
13+
from utils.utils import parse_activation
1314

1415
import logging
1516
logger = logging.getLogger(__name__)
@@ -157,8 +158,7 @@ def plot_layer_output(self,plot_spec,plot_path,max_images=10):
157158
class CNN(CNNBase):
158159
""" Instantiation of Convolution neural network ... """
159160
def __init__(self, numpy_rng, theano_rng, batch_size, n_outs,conv_layer_configs, hidden_layer_configs,
160-
use_fast=False,conv_activation = T.nnet.sigmoid,hidden_activation = T.nnet.sigmoid,
161-
l1_reg=None,l2_reg=None,max_col_norm=None):
161+
use_fast=False,hidden_activation = T.nnet.sigmoid,l1_reg=None,l2_reg=None,max_col_norm=None):
162162

163163
super(CNN, self).__init__(conv_layer_configs, hidden_layer_configs,l1_reg,l2_reg,max_col_norm)
164164
if not theano_rng:
@@ -171,7 +171,7 @@ def __init__(self, numpy_rng, theano_rng, batch_size, n_outs,conv_layer_configs,
171171
input = self.layers[-1].output #output of previous layer
172172

173173
config = conv_layer_configs[i]
174-
174+
conv_activation= parse_activation(config['activation'])
175175
conv_layer = ConvLayer(numpy_rng=numpy_rng, input=input,input_shape=config['input_shape'],
176176
filter_shape=config['filter_shape'],poolsize=config['poolsize'],
177177
activation = conv_activation, use_fast = use_fast)
@@ -255,8 +255,8 @@ def save_mlp2dict(self,withfinal=True,max_layer_num=-1):
255255
class DropoutCNN(CNNBase):
256256
""" Instantiation of Convolution neural network ... """
257257
def __init__(self, numpy_rng, theano_rng, batch_size, n_outs,conv_layer_configs, hidden_layer_configs,
258-
use_fast=False,conv_activation = T.nnet.sigmoid,hidden_activation = T.nnet.sigmoid,
259-
l1_reg=None,l2_reg=None,max_col_norm=None,input_dropout_factor=0.0):
258+
use_fast=False,hidden_activation = T.nnet.sigmoid,l1_reg=None,l2_reg=None,
259+
max_col_norm=None,input_dropout_factor=0.0):
260260

261261
super(DropoutCNN, self).__init__(conv_layer_configs,hidden_layer_configs,l1_reg,l2_reg,max_col_norm)
262262
self.input_dropout_factor = input_dropout_factor;
@@ -277,7 +277,7 @@ def __init__(self, numpy_rng, theano_rng, batch_size, n_outs,conv_layer_configs,
277277
dropout_conv_input = self.dropout_layers[-1].dropout_output;
278278

279279
config = conv_layer_configs[i]
280-
280+
activation = conv_activation
281281
dropout_conv_layer = DropoutConvLayer(numpy_rng=numpy_rng, input=dropout_conv_input,
282282
input_shape=config['input_shape'],filter_shape=config['filter_shape'],poolsize=config['poolsize'],
283283
activation = conv_activation, use_fast = use_fast,dropout_factor=conv_layer_configs[i]['dropout_factor'])

src/models/cnn3d.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
from collections import OrderedDict
1111
from io_modules.file_reader import read_dataset
1212
from utils.plotter import plot
13+
from utils.utils import parse_activation
1314

1415
import logging
1516
logger = logging.getLogger(__name__)
@@ -158,8 +159,8 @@ def plot_layer_output(self,plot_spec,plot_path,max_images=10):
158159

159160
class CNN3D(CNN3DBase):
160161
""" Instantiation of Convolution neural network ... """
161-
def __init__(self, numpy_rng, theano_rng, batch_size, n_outs,conv_layer_configs, hidden_layer_configs,
162-
conv_activation = T.nnet.sigmoid,hidden_activation = T.nnet.sigmoid,l1_reg=None,l2_reg=None,max_col_norm=None):
162+
def __init__(self, numpy_rng, theano_rng, batch_size, n_outs,conv_layer_configs, hidden_layer_configs,
163+
hidden_activation = T.nnet.sigmoid,l1_reg=None,l2_reg=None,max_col_norm=None):
163164

164165
super(CNN3D, self).__init__(conv_layer_configs, hidden_layer_configs,l1_reg,l2_reg,max_col_norm)
165166
if not theano_rng:
@@ -172,7 +173,7 @@ def __init__(self, numpy_rng, theano_rng, batch_size, n_outs,conv_layer_configs,
172173
input = self.layers[-1].output #output of previous layer
173174

174175
config = conv_layer_configs[i]
175-
176+
conv_activation = parse_activation(config['activation']);
176177
conv_layer = ConvLayer(numpy_rng=numpy_rng, input=input,input_shape=config['input_shape'],
177178
filter_shape=config['filter_shape'],poolsize=config['poolsize'],
178179
activation = conv_activation)

src/run/run_CNN.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -25,9 +25,8 @@
2525

2626
from utils.load_conf import load_model,load_conv_spec,load_data_spec
2727
from io_modules.file_reader import read_dataset
28-
from utils.utils import parse_activation
2928
from io_modules import setLogger
30-
29+
from utils.utils import parse_activation
3130
from run import fineTunning,testing,exportFeatures,createDir
3231

3332
import logging
@@ -53,8 +52,7 @@ def runCNN(arg):
5352
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
5453

5554
logger.info('> ... building the model')
56-
conv_activation = parse_activation(conv_config['activation']);
57-
hidden_activation = parse_activation(mlp_config['activation']);
55+
activationFn = parse_activation(mlp_config['activation']);
5856

5957
createDir(model_config['wdir']);
6058
#create working dir
@@ -64,14 +62,14 @@ def runCNN(arg):
6462
logger.info('>Initializing dropout cnn model')
6563
cnn = DropoutCNN(numpy_rng,theano_rng,conv_layer_configs = conv_layer_config, batch_size = batch_size,
6664
n_outs=model_config['n_outs'],hidden_layer_configs=mlp_config,
67-
conv_activation = conv_activation,hidden_activation = hidden_activation,
65+
hidden_activation = activationFn,
6866
use_fast = conv_config['use_fast'],l1_reg = mlp_config['l1_reg'],
6967
l2_reg = mlp_config['l1_reg'],max_col_norm = mlp_config['max_col_norm'],
7068
input_dropout_factor=conv_config['input_dropout_factor'])
7169
else:
7270
cnn = CNN(numpy_rng,theano_rng,conv_layer_configs = conv_layer_config, batch_size = batch_size,
7371
n_outs=model_config['n_outs'],hidden_layer_configs=mlp_config,
74-
conv_activation = conv_activation,hidden_activation = hidden_activation,
72+
hidden_activation = activationFn,
7573
use_fast = conv_config['use_fast'],l1_reg = mlp_config['l1_reg'],
7674
l2_reg = mlp_config['l1_reg'],max_col_norm = mlp_config['max_col_norm'])
7775

src/run/run_CNN3d.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
# See the Apache 2 License for the specific language governing permissions and
1616
# limitations under the License.
1717

18-
import cPickle, gzip, os, time,sys
18+
import sys
1919
from models.cnn3d import CNN3D
2020
import numpy
2121

@@ -53,7 +53,6 @@ def runCNN3D(arg):
5353
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
5454

5555
logger.info('> ... building the model')
56-
conv_activation = parse_activation(conv_config['activation']);
5756
hidden_activation = parse_activation(mlp_config['activation']);
5857

5958
createDir(model_config['wdir']);
@@ -63,8 +62,7 @@ def runCNN3D(arg):
6362
batch_size = model_config['batch_size'];
6463

6564
cnn = CNN3D(numpy_rng,theano_rng,conv_layer_configs = conv_layer_config, batch_size = batch_size,
66-
n_outs=model_config['n_outs'],hidden_layer_configs=mlp_config,
67-
conv_activation = conv_activation,hidden_activation = hidden_activation,
65+
n_outs=model_config['n_outs'],hidden_layer_configs=mlp_config,hidden_activation = hidden_activation,
6866
l1_reg = mlp_config['l1_reg'],l2_reg = mlp_config['l1_reg'],max_col_norm = mlp_config['max_col_norm'])
6967

7068

src/utils/load_conf.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -277,6 +277,9 @@ def load_conv_spec(input_file,batch_size,input_shape):
277277
else:
278278
cnn_data['input_dropout_factor']=0.0;
279279

280+
if not cnn_data.has_key('activation'):
281+
cnn_data['activation'] = "sigmoid";
282+
280283
#layer_configs
281284
layer_configs=cnn_data.pop('layers');
282285
conv_configs = cnn_data;
@@ -310,13 +313,18 @@ def load_conv_spec(input_file,batch_size,input_shape):
310313
input_shape.append((inp-wdim+1)/pool);
311314
layer_configs[layer_index]['output_shape'].extend(input_shape)
312315

316+
if not layer_configs[layer_index].has_key('activation'):
317+
layer_configs[layer_index]['activation'] = cnn_data['activation'];
318+
313319
if (do_dropout and (not layer_configs[layer_index].has_key('dropout_factor') and
314320
not type(layer_configs[layer_index]['dropout_factor']) is float)):
315321
logger.critical("dropout_factor of cnn layer %d is not present (or not a float) in mlp_configuration"%layer_index)
316322
exit(1);
317323
elif not do_dropout:
318324
layer_configs[layer_index]['dropout_factor']=0.0;
319325

326+
327+
320328
prev_map_number = current_map_number
321329

322330

0 commit comments

Comments (0)