import time

class nnet(object):
    """Abstract class for all Network Models"""
    def __init__(self):
        self.finetune_cost = None
        self.params = []
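        # A concrete model is expected to fill in self.finetune_cost and
        # self.params, and to define the symbolic input self.x and the
        # bottleneck output self.features that getFeaturesFunction below
        # relies on.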
@@ -31,7 +31,7 @@ def pretraining_functions(self, train_x, batch_size):
3131 #"Building fine tuning operation "
3232 def build_finetune_functions (self , train_shared_xy , valid_shared_xy , batch_size ):
3333 """
34- Generates a function `train` that implements one step of
34+ Generates a function `train` that implements one step of
3535 finetuning and a function `validate` that computes the error on
3636 a batch from the validation set
3737
@@ -48,12 +48,12 @@ def build_finetune_functions(self, train_shared_xy, valid_shared_xy, batch_size)
        :type batch_size: int
        :param batch_size: size of a minibatch

        :returns: (theano.function, theano.function)
            * A training function which takes minibatch_index, learning_rate
              and momentum, updates the weights, and returns the error rate
            * A validation function which takes minibatch_index and returns
              the error rate

        """

        (train_set_x, train_set_y) = train_shared_xy
        (valid_set_x, valid_set_y) = valid_shared_xy
@@ -94,21 +94,21 @@ def build_finetune_functions(self, train_shared_xy, valid_shared_xy, batch_size)
        return train_fn, valid_fn

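    # The returned pair is typically driven the way fineTunning below does it;
    # here `batch_index`, `current_rate` and `momentum` are placeholders:
    #     err = train_fn(index=batch_index, learning_rate=current_rate, momentum=momentum)
    #     loss = validate_fn(batch_index)
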
    def build_test_function(self, test_shared_xy, batch_size):
        """
        Get the function for testing

        :type test_shared_xy: pair of theano.tensor.TensorType
        :param test_shared_xy: the test dataset as a pair of two Theano
            variables, one for the datapoints, the other for the labels

        :type batch_size: int
        :param batch_size: size of a minibatch

        :returns: theano.function
            A function which takes the index of a minibatch and returns the
            predicted label array and the error

        """
        (test_set_x, test_set_y) = test_shared_xy
        index = T.lscalar('index')  # index to a [mini]batch
        test_fn = theano.function(inputs=[index],
@@ -119,14 +119,14 @@ def build_test_function(self,test_shared_xy,batch_size):
        return test_fn

    def getFeaturesFunction(self):
        """
        Get the function for extracting the features / bottleneck output

        :returns: theano.function
            A function which takes the input features and returns the
            extracted features
        """
        # in_x = T.matrix('in_x')
        in_x = self.x.type('in_x')  # new variable with the same type as the model input
        fn = theano.function(inputs=[in_x], outputs=[self.features],
                             givens={self.x: in_x}, name='features')  # , on_unused_input='warn'
        return fn
@@ -137,89 +137,89 @@ def getFeaturesFunction(self):

def testing(nnetModel, test_sets, test_xy, test_x, test_y, batch_size):

    # get the testing function for the model
    logger.info('Getting the Test function')
    test_fn = nnetModel.build_test_function((test_x, test_y), batch_size=batch_size)

    logger.info('Starting Testing')

    test_error = []
    test_output = numpy.array([], int)
    while not test_sets.is_finish():
        test_sets.make_partition_shared(test_xy)
        n_test_batches = test_sets.cur_frame_num / batch_size
        for i in xrange(n_test_batches):
            pred, err = test_fn(i)
            test_error.append(err)
            test_output = numpy.append(test_output, pred)
        test_sets.read_next_partition_data()
        logger.debug("Test Error (up to current partition) = %f", numpy.mean(test_error))
    test_sets.initialize_read()

    test_loss = numpy.mean(test_error)
    logger.info('Optimization complete with best Test score of %f %%', test_loss * 100)

    return test_output, test_loss

def fineTunning(nnetModel, train_sets, train_xy, train_x, train_y,
                valid_sets, valid_xy, valid_x, valid_y, lrate, momentum, batch_size):

    def valid_score():
        valid_error = []
        while not valid_sets.is_finish():
            valid_sets.make_partition_shared(valid_xy)
            n_valid_batches = valid_sets.cur_frame_num / batch_size
            validation_losses = [validate_fn(i) for i in xrange(n_valid_batches)]
            valid_error.append(validation_losses)
            valid_sets.read_next_partition_data()
            logger.debug("Valid Error (up to current partition) = %f", numpy.mean(valid_error))
        valid_sets.initialize_read()
        return numpy.mean(valid_error)

    # get the training and validation functions for the model
    logger.info('Getting the finetuning functions')
    train_fn, validate_fn = nnetModel.build_finetune_functions((train_x, train_y),
                                                               (valid_x, valid_y),
                                                               batch_size=batch_size)

    best_validation_loss = float('Inf')

    logger.info('Finetuning the model..')
    start_time = time.clock()

    while lrate.get_rate() != 0:
        train_error = []
        while not train_sets.is_finish():
            train_sets.make_partition_shared(train_xy)
            for batch_index in xrange(train_sets.cur_frame_num / batch_size):  # loop over mini-batches
                train_error.append(train_fn(index=batch_index,
                                            learning_rate=lrate.get_rate(),
                                            momentum=momentum))
                logger.debug('Training batch %d error %f', batch_index, numpy.mean(train_error))
            train_sets.read_next_partition_data()
        logger.info('Fine Tuning: epoch %d, training error %f', lrate.epoch, numpy.mean(train_error))
        train_sets.initialize_read()

        valid_error = valid_score()
        if valid_error < best_validation_loss:
            best_validation_loss = valid_error
        logger.info('Fine Tuning: epoch %d, validation error %f', lrate.epoch, valid_error)
        lrate.get_next_rate(current_error=100 * valid_error)

    end_time = time.clock()

    logger.info('Best validation error %f', best_validation_loss)

    logger.info('The fine tuning ran for %.2fm' % ((end_time - start_time) / 60.))
    logger.info('Optimization complete with best validation score of %f %%', best_validation_loss * 100)

    return best_validation_loss
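
# fineTunning above only assumes that the `lrate` object exposes get_rate()
# (returning 0 when training should stop), get_next_rate(current_error) and an
# `epoch` attribute. The class below is an illustrative, minimal scheduler that
# satisfies this interface with a fixed per-epoch decay; it is a sketch, not
# necessarily the scheduler used with this module.
class _ExampleFixedDecayRate(object):
    def __init__(self, start_rate=0.08, decay=0.5, min_rate=1e-4, max_epochs=20):
        self.rate = start_rate        # current learning rate
        self.decay = decay            # multiplicative decay applied each epoch
        self.min_rate = min_rate      # stop once the rate falls below this
        self.max_epochs = max_epochs  # hard cap on the number of epochs
        self.epoch = 1                # epoch counter read for logging

    def get_rate(self):
        return self.rate

    def get_next_rate(self, current_error):
        # A real scheduler would typically look at current_error (e.g. decay
        # only when the validation error stops improving); this sketch decays
        # unconditionally.
        self.epoch += 1
        self.rate *= self.decay
        if self.rate < self.min_rate or self.epoch > self.max_epochs:
            self.rate = 0  # signals fineTunning to leave its outer loop
        return self.rate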


def getFeatures(nnetModel, data_spec_testing):
    out_function = nnetModel.getFeaturesFunction()
    test_sets, test_xy, test_x, test_y = read_dataset(data_spec_testing)
    while not test_sets.is_finish():
        data = out_function(test_sets.feat)
        test_sets.read_next_partition_data()
        # TODO write data
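
# Illustrative usage sketch (commented out): how the helpers above are meant to
# fit together. The concrete model class and the data-spec strings are
# hypothetical placeholders, not names defined in this module.
#
#     train_sets, train_xy, train_x, train_y = read_dataset(train_data_spec)
#     valid_sets, valid_xy, valid_x, valid_y = read_dataset(valid_data_spec)
#     test_sets, test_xy, test_x, test_y = read_dataset(test_data_spec)
#
#     nnetModel = SomeConcreteNnet(...)        # any subclass of nnet
#     lrate = _ExampleFixedDecayRate(0.08)     # or any compatible scheduler
#
#     fineTunning(nnetModel, train_sets, train_xy, train_x, train_y,
#                 valid_sets, valid_xy, valid_x, valid_y,
#                 lrate, momentum=0.5, batch_size=256)
#     labels, test_loss = testing(nnetModel, test_sets, test_xy, test_x, test_y,
#                                 batch_size=256)
#     getFeatures(nnetModel, test_data_spec)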