-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathLogistic Regression.py
75 lines (60 loc) · 1.7 KB
/
Logistic Regression.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
import tensorflow as tf
from utils import check_softmax, check_acc, check_model, check_ce
def softmax(logits):
    """
    Numerically stable softmax implementation.

    args:
    - logits [tensor]: NxC logits tensor (one row of logits per example)
    returns:
    - soft_logits [tensor]: row-wise softmax of logits, same shape as input
    """
    # Subtract the row-wise max before exponentiating: softmax is invariant
    # to a constant shift per row, and this prevents tf.exp from overflowing
    # to inf (and the division from producing NaN) for large logit values.
    shifted = logits - tf.math.reduce_max(logits, 1, keepdims=True)
    exp = tf.exp(shifted)
    denom = tf.math.reduce_sum(exp, 1, keepdims=True)
    return exp / denom
def cross_entropy(scaled_logits, one_hot):
    """
    Cross entropy loss implementation.

    args:
    - scaled_logits [tensor]: NxC tensor where N batch size / C number of classes
    - one_hot [tensor]: one hot tensor marking the true class of each example
    returns:
    - loss [tensor]: per-example negative log likelihood (N tensor)
    """
    # Select the probability the model assigned to each example's true class,
    # then take the negative log to get the NLL for each example.
    true_class_probs = tf.boolean_mask(scaled_logits, one_hot)
    nll = -tf.math.log(true_class_probs)
    return nll
def model(X, W, b):
    """
    Logistic regression model.

    args:
    - X [tensor]: input HxWx3
    - W [tensor]: weights
    - b [tensor]: bias
    returns:
    - output [tensor]: class probabilities from the softmax layer
    """
    # Collapse each input into a single feature row whose width matches
    # W's input dimension, apply the affine map, then normalize with softmax.
    features = tf.reshape(X, (-1, W.shape[0]))
    logits = tf.matmul(features, W) + b
    return softmax(logits)
def accuracy(y_hat, Y):
    """
    Calculate classification accuracy.

    args:
    - y_hat [tensor]: NxC tensor of model predictions
    - Y [tensor]: N tensor of ground truth classes
    returns:
    - acc [tensor]: fraction of examples whose predicted class matches Y
    """
    # Predicted class = index of the highest score, cast so the
    # elementwise comparison against the ground-truth labels is valid.
    predictions = tf.cast(tf.argmax(y_hat, axis=1), Y.dtype)
    correct = tf.cast(predictions == Y, tf.int32)
    # Number of correct predictions divided by the batch size.
    return tf.math.reduce_sum(correct) / Y.shape[0]
if __name__ == '__main__':
    # Run the provided checkers against each implementation.
    # (Removed a stray trailing ';' line that was a syntax error in Python.)
    # checking the softmax implementation
    check_softmax(softmax)
    # checking the NLL implementation
    check_ce(cross_entropy)
    # check the model implementation
    check_model(model)
    # check the accuracy implementation
    check_acc(accuracy)