completed part 1 of deep learning

2019-07-18 22:21:59 +01:00
parent 906912eb15
commit 1c8aec47c2
16 changed files with 3458 additions and 280 deletions


@@ -0,0 +1,10 @@
import numpy as np
# Write a function that takes as input two lists Y, P,
# and returns the float corresponding to their cross-entropy.
def cross_entropy(Y, P):
    Y = np.float_(Y)
    P = np.float_(P)
    return -np.sum(Y * np.log(P) + (1 - Y) * np.log(1 - P))
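A quick sanity check of the function above (not part of the commit; the labels and probabilities below are made up for illustration):

# Hypothetical example: for labels Y = [1, 0, 1, 1] and predicted
# probabilities P = [0.4, 0.6, 0.1, 0.5], the terms reduce to
# log(0.4), log(0.4), log(0.1), log(0.5), so the result is about 4.83.
print(cross_entropy([1, 0, 1, 1], [0.4, 0.6, 0.1, 0.5]))  # ~4.8283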


@@ -0,0 +1,18 @@
import numpy as np
# Write a function that takes as input a list of numbers, and returns
# the list of values given by the softmax function.
def softmax(L):
    expL = np.exp(L)
    sumExpL = sum(expL)
    result = []
    for i in expL:
        result.append(i * 1.0 / sumExpL)
    return result
# Note: The function np.divide can also be used here, as follows:
# def softmax(L):
#     expL = np.exp(L)
#     return np.divide(expL, expL.sum())
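A minimal usage sketch (not in the commit; the input list is illustrative):

# Hypothetical example: softmax([5, 6, 7]) exponentiates each entry and
# normalizes by the sum, giving roughly [0.0900, 0.2447, 0.6652].
# The outputs are positive and sum to 1, as a probability distribution should.
print(softmax([5, 6, 7]))  # ~[0.090, 0.245, 0.665]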