
Commit cfa11a2

remove dead code

1 parent: 93f3b72

File tree: 1 file changed (+0, -26 lines)


thesis/build_networks.py

Lines changed: 0 additions & 26 deletions
@@ -138,32 +138,6 @@ def loss_func(self, lambduh=3.0):
         loss = (loss.mean() + lambduh/self.mean_C * regularization_term).mean()
         return loss
 
-    # def train_fn(self, training_data, training_labels, updates='adadelta'):
-    #     self.training_labels_shared.set_value(training_labels.reshape(training_labels.shape[0], training_labels.shape[1], 1), borrow=True)
-    #     self.training_data_shared.set_value(np.asarray(training_data, dtype=dtype), borrow=True)
-    #     self.normlayer.set_normalisation(training_data)
-
-    #     loss = self.loss_func()
-
-    #     indx = theano.shared(0)
-    #     update_args = {
-    #         'adadelta': (lasagne.updates.adadelta, {'learning_rate': 0.01, 'rho': 0.4, 'epsilon': 1e-6,}),
-    #         'adam': (lasagne.updates.adam, {},),
-    #     }[updates]
-    #     update_func, update_params = update_args[0], update_args[1]
-
-    #     params = lasagne.layers.get_all_params(self.network, trainable=True)
-    #     updates = update_func(loss, params, **update_params)
-    #     updates[indx] = indx + 1
-    #     train_fn = theano.function([], loss, updates=updates,
-    #                                givens={
-    #                                    self.input_var: self.training_data_shared[indx, :, :, :, :],
-    #                                    self.soft_output_var: self.training_labels_shared[indx, :, :],
-    #                                },
-    #                                allow_input_downcast=True,
-    #                                )
-    #     return indx, train_fn
-
     def normalize_batches(self, training_data):
         self.normlayer.set_normalisation(training_data)
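
Note: the removed train_fn implemented Theano's pre-loaded shared-data pattern: all training batches are copied into device-side shared variables once, and a shared integer index, advanced as a side effect of the update dict, selects the current batch through `givens`, so each call of the compiled function trains on one batch with no per-call host-to-device input transfer. Below is a minimal self-contained sketch of that pattern for reference; Theano and Lasagne are unmaintained, and the tiny network, shapes, and names such as `data_shared` and `step` are illustrative assumptions, not taken from the repository.

# Sketch of the shared-index training pattern used by the deleted code.
# All names below (data_shared, labels_shared, step, ...) are illustrative.
import numpy as np
import theano
import theano.tensor as T
import lasagne

n_batches, batch_size, n_features, n_classes = 8, 16, 10, 3

# Pre-load every batch into shared variables once, as the deleted
# set_value(..., borrow=True) calls did.
data_shared = theano.shared(
    np.random.randn(n_batches, batch_size, n_features).astype(theano.config.floatX))
labels_shared = theano.shared(
    np.random.randint(0, n_classes, (n_batches, batch_size)).astype('int32'))

input_var = T.matrix('inputs')
target_var = T.ivector('targets')

# A tiny stand-in for self.network in the original class.
network = lasagne.layers.InputLayer((None, n_features), input_var=input_var)
network = lasagne.layers.DenseLayer(network, n_classes,
                                    nonlinearity=lasagne.nonlinearities.softmax)

prediction = lasagne.layers.get_output(network)
loss = lasagne.objectives.categorical_crossentropy(prediction, target_var).mean()

params = lasagne.layers.get_all_params(network, trainable=True)
updates = lasagne.updates.adadelta(loss, params,
                                   learning_rate=0.01, rho=0.4, epsilon=1e-6)

# Advance the batch index as part of the updates, mirroring the deleted
# `updates[indx] = indx + 1` (here wrapped so the index stays in range).
indx = theano.shared(0)
updates[indx] = (indx + 1) % n_batches

# `givens` substitutes the current batch for the symbolic inputs, so the
# compiled function takes no arguments and moves no input data per call.
step = theano.function([], loss, updates=updates,
                       givens={input_var: data_shared[indx],
                               target_var: labels_shared[indx]},
                       allow_input_downcast=True)

for _ in range(2 * n_batches):  # two passes over the pre-loaded batches
    print(step())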
