-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathrelu.py
More file actions
36 lines (25 loc) · 1.03 KB
/
relu.py
File metadata and controls
36 lines (25 loc) · 1.03 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
import numpy as np
def relu_forward(input_data):
    """Forward pass of a ReLU activation layer.

    Args:
        input_data: dict with keys 'height', 'width', 'channel',
            'batch_size' (layer geometry, copied through unchanged) and
            'data' (numpy array of pre-activation values).

    Returns:
        dict with the same geometry keys and 'data' set to
        elementwise max(0, x). The input array is not modified.
    """
    output = {
        'height': input_data['height'],
        'width': input_data['width'],
        'channel': input_data['channel'],
        'batch_size': input_data['batch_size'],
    }
    # np.maximum allocates the result itself — no need for a zeros_like
    # buffer that is immediately overwritten (removed dead allocation).
    output['data'] = np.maximum(0, input_data['data'])
    return output
def relu_backward(output, input_data, layer):
    """Backward pass of a ReLU activation layer.

    Args:
        output: dict holding 'diff', the upstream gradient w.r.t. this
            layer's output (numpy array, same shape as the input data).
        input_data: dict holding 'data', the values fed to relu_forward.
        layer: unused; kept so all layers share the same backward
            signature.

    Returns:
        numpy array: gradient w.r.t. the layer input. The upstream
        gradient array is not modified (a copy is masked).
    """
    # Copy so the caller's upstream gradient is left untouched.
    input_od = np.copy(output['diff'])
    # ReLU derivative is 1 for x > 0 and 0 for x <= 0. Using <= 0 (not < 0)
    # makes the subgradient at x == 0 match the forward max(0, x) convention
    # used by standard frameworks; the old `< 0` mask leaked gradient
    # through inputs that were exactly zero.
    input_od[input_data['data'] <= 0] = 0
    return input_od