
Commit 0f6684d

deeplearning Topic 1 programming assignments
1 parent 7109649 commit 0f6684d

File tree

14 files changed: +6344 -0 lines


01神经网络和深度学习/Code编程作业/deeplearning第1专题编程作业/deeplearning编程作业/week2/Logistic Regression as a Neural Network/Logistic Regression with a Neural Network mindset v3.ipynb

+1,341
Large diffs are not rendered by default.
@@ -0,0 +1,19 @@
import numpy as np
import h5py


def load_dataset():
    train_dataset = h5py.File('datasets/train_catvnoncat.h5', "r")
    train_set_x_orig = np.array(train_dataset["train_set_x"][:])  # your train set features
    train_set_y_orig = np.array(train_dataset["train_set_y"][:])  # your train set labels

    test_dataset = h5py.File('datasets/test_catvnoncat.h5', "r")
    test_set_x_orig = np.array(test_dataset["test_set_x"][:])  # your test set features
    test_set_y_orig = np.array(test_dataset["test_set_y"][:])  # your test set labels

    classes = np.array(test_dataset["list_classes"][:])  # the list of classes

    train_set_y_orig = train_set_y_orig.reshape((1, train_set_y_orig.shape[0]))
    test_set_y_orig = test_set_y_orig.reshape((1, test_set_y_orig.shape[0]))

    return train_set_x_orig, train_set_y_orig, test_set_x_orig, test_set_y_orig, classes
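
Usage note (not part of the commit): the Week 2 notebook consumes this loader by flattening each image into a column vector and scaling pixel intensities. A minimal sketch, assuming the datasets/ folder contains the two .h5 files referenced above:

train_set_x_orig, train_set_y, test_set_x_orig, test_set_y, classes = load_dataset()

# Each image is (num_px, num_px, 3); stack each one into a single column
train_set_x_flatten = train_set_x_orig.reshape(train_set_x_orig.shape[0], -1).T
test_set_x_flatten = test_set_x_orig.reshape(test_set_x_orig.shape[0], -1).T

# Scale pixel intensities from [0, 255] to [0, 1]
train_set_x = train_set_x_flatten / 255.
test_set_x = test_set_x_flatten / 255.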

01神经网络和深度学习/Code编程作业/deeplearning第1专题编程作业/deeplearning编程作业/week3/Planar data classification with one hidden layer/Planar data classification with one hidden layer v3.ipynb

+1,591
Large diffs are not rendered by default.
@@ -0,0 +1,66 @@
import matplotlib.pyplot as plt
import numpy as np
import sklearn
import sklearn.datasets
import sklearn.linear_model


def plot_decision_boundary(model, X, y):
    # Set min and max values and give the grid some padding
    x_min, x_max = X[0, :].min() - 1, X[0, :].max() + 1
    y_min, y_max = X[1, :].min() - 1, X[1, :].max() + 1
    h = 0.01
    # Generate a grid of points with distance h between them
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid
    Z = model(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and training examples
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.ylabel('x2')
    plt.xlabel('x1')
    # ravel y: recent matplotlib versions reject a (1, m) color array
    plt.scatter(X[0, :], X[1, :], c=y.ravel(), cmap=plt.cm.Spectral)


def sigmoid(x):
    """
    Compute the sigmoid of x

    Arguments:
    x -- A scalar or numpy array of any size.

    Return:
    s -- sigmoid(x)
    """
    s = 1 / (1 + np.exp(-x))
    return s


def load_planar_dataset():
    np.random.seed(1)
    m = 400  # number of examples
    N = int(m / 2)  # number of points per class
    D = 2  # dimensionality
    X = np.zeros((m, D))  # data matrix where each row is a single example
    Y = np.zeros((m, 1), dtype='uint8')  # labels vector (0 for red, 1 for blue)
    a = 4  # maximum radius of the flower petals

    for j in range(2):
        ix = range(N * j, N * (j + 1))
        t = np.linspace(j * 3.12, (j + 1) * 3.12, N) + np.random.randn(N) * 0.2  # theta
        r = a * np.sin(4 * t) + np.random.randn(N) * 0.2  # radius
        X[ix] = np.c_[r * np.sin(t), r * np.cos(t)]
        Y[ix] = j

    X = X.T
    Y = Y.T

    return X, Y


def load_extra_datasets():
    N = 200
    noisy_circles = sklearn.datasets.make_circles(n_samples=N, factor=.5, noise=.3)
    noisy_moons = sklearn.datasets.make_moons(n_samples=N, noise=.2)
    blobs = sklearn.datasets.make_blobs(n_samples=N, random_state=5, n_features=2, centers=6)
    gaussian_quantiles = sklearn.datasets.make_gaussian_quantiles(mean=None, cov=0.5, n_samples=N, n_features=2, n_classes=2, shuffle=True, random_state=None)
    no_structure = np.random.rand(N, 2), np.random.rand(N, 2)

    return noisy_circles, noisy_moons, blobs, gaussian_quantiles, no_structure
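
Usage note (illustrative, not from the commit): the Week 3 notebook drives these helpers roughly as below, fitting sklearn's logistic regression on the flower data to show that a linear classifier underfits it:

X, Y = load_planar_dataset()               # X: (2, 400), Y: (1, 400)

clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.ravel())                    # sklearn expects (m, n_features) and (m,)

plot_decision_boundary(lambda x: clf.predict(x), X, Y)
plt.title("Logistic Regression")
plt.show()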
@@ -0,0 +1,119 @@
import numpy as np


def layer_sizes_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(5, 3)
    Y_assess = np.random.randn(2, 3)
    return X_assess, Y_assess


def initialize_parameters_test_case():
    n_x, n_h, n_y = 2, 4, 1
    return n_x, n_h, n_y


def forward_propagation_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(2, 3)

    parameters = {'W1': np.array([[-0.00416758, -0.00056267],
                                  [-0.02136196,  0.01640271],
                                  [-0.01793436, -0.00841747],
                                  [ 0.00502881, -0.01245288]]),
                  'W2': np.array([[-0.01057952, -0.00909008,  0.00551454,  0.02292208]]),
                  'b1': np.array([[0.], [0.], [0.], [0.]]),
                  'b2': np.array([[0.]])}

    return X_assess, parameters


def compute_cost_test_case():
    np.random.seed(1)
    Y_assess = np.random.randn(1, 3)
    parameters = {'W1': np.array([[-0.00416758, -0.00056267],
                                  [-0.02136196,  0.01640271],
                                  [-0.01793436, -0.00841747],
                                  [ 0.00502881, -0.01245288]]),
                  'W2': np.array([[-0.01057952, -0.00909008,  0.00551454,  0.02292208]]),
                  'b1': np.array([[0.], [0.], [0.], [0.]]),
                  'b2': np.array([[0.]])}

    a2 = np.array([[0.5002307, 0.49985831, 0.50023963]])

    return a2, Y_assess, parameters


def backward_propagation_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(2, 3)
    Y_assess = np.random.randn(1, 3)
    parameters = {'W1': np.array([[-0.00416758, -0.00056267],
                                  [-0.02136196,  0.01640271],
                                  [-0.01793436, -0.00841747],
                                  [ 0.00502881, -0.01245288]]),
                  'W2': np.array([[-0.01057952, -0.00909008,  0.00551454,  0.02292208]]),
                  'b1': np.array([[0.], [0.], [0.], [0.]]),
                  'b2': np.array([[0.]])}

    cache = {'A1': np.array([[-0.00616578,  0.0020626 ,  0.00349619],
                             [-0.05225116,  0.02725659, -0.02646251],
                             [-0.02009721,  0.0036869 ,  0.02883756],
                             [ 0.02152675, -0.01385234,  0.02599885]]),
             'A2': np.array([[ 0.5002307 ,  0.49985831,  0.50023963]]),
             'Z1': np.array([[-0.00616586,  0.0020626 ,  0.0034962 ],
                             [-0.05229879,  0.02726335, -0.02646869],
                             [-0.02009991,  0.00368692,  0.02884556],
                             [ 0.02153007, -0.01385322,  0.02600471]]),
             'Z2': np.array([[ 0.00092281, -0.00056678,  0.00095853]])}
    return parameters, cache, X_assess, Y_assess


def update_parameters_test_case():
    parameters = {'W1': np.array([[-0.00615039,  0.0169021 ],
                                  [-0.02311792,  0.03137121],
                                  [-0.0169217 , -0.01752545],
                                  [ 0.00935436, -0.05018221]]),
                  'W2': np.array([[-0.0104319 , -0.04019007,  0.01607211,  0.04440255]]),
                  'b1': np.array([[-8.97523455e-07],
                                  [ 8.15562092e-06],
                                  [ 6.04810633e-07],
                                  [-2.54560700e-06]]),
                  'b2': np.array([[ 9.14954378e-05]])}

    grads = {'dW1': np.array([[ 0.00023322, -0.00205423],
                              [ 0.00082222, -0.00700776],
                              [-0.00031831,  0.0028636 ],
                              [-0.00092857,  0.00809933]]),
             'dW2': np.array([[-1.75740039e-05,  3.70231337e-03, -1.25683095e-03, -2.55715317e-03]]),
             'db1': np.array([[ 1.05570087e-07],
                              [-3.81814487e-06],
                              [-1.90155145e-07],
                              [ 5.46467802e-07]]),
             'db2': np.array([[-1.08923140e-05]])}
    return parameters, grads


def nn_model_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(2, 3)
    Y_assess = np.random.randn(1, 3)
    return X_assess, Y_assess


def predict_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(2, 3)
    parameters = {'W1': np.array([[-0.00615039,  0.0169021 ],
                                  [-0.02311792,  0.03137121],
                                  [-0.0169217 , -0.01752545],
                                  [ 0.00935436, -0.05018221]]),
                  'W2': np.array([[-0.0104319 , -0.04019007,  0.01607211,  0.04440255]]),
                  'b1': np.array([[-8.97523455e-07],
                                  [ 8.15562092e-06],
                                  [ 6.04810633e-07],
                                  [-2.54560700e-06]]),
                  'b2': np.array([[ 9.14954378e-05]])}
    return parameters, X_assess
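
Usage note (not part of the commit): each *_test_case() returns small, seeded fixtures so functions written in the Week 3 notebook can be checked against known outputs. As a self-contained illustration, a minimal 2-4-1 forward pass (tanh hidden layer, sigmoid output, the architecture the assignment uses) over the forward-propagation fixture should approximately reproduce the a2 values baked into compute_cost_test_case; np is imported at the top of this file:

X_assess, parameters = forward_propagation_test_case()

Z1 = np.dot(parameters['W1'], X_assess) + parameters['b1']
A1 = np.tanh(Z1)
Z2 = np.dot(parameters['W2'], A1) + parameters['b2']
A2 = 1 / (1 + np.exp(-Z2))

print(A2)  # approximately [[0.5002307, 0.49985831, 0.50023963]]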
@@ -0,0 +1,82 @@
import numpy as np


def sigmoid(Z):
    """
    Implements the sigmoid activation in numpy

    Arguments:
    Z -- numpy array of any shape

    Returns:
    A -- output of sigmoid(Z), same shape as Z
    cache -- returns Z as well, useful during backpropagation
    """
    A = 1 / (1 + np.exp(-Z))
    cache = Z

    return A, cache


def relu(Z):
    """
    Implement the RELU function.

    Arguments:
    Z -- Output of the linear layer, of any shape

    Returns:
    A -- Post-activation parameter, of the same shape as Z
    cache -- returns Z as well, stored for computing the backward pass efficiently
    """
    A = np.maximum(0, Z)

    assert (A.shape == Z.shape)

    cache = Z
    return A, cache


def relu_backward(dA, cache):
    """
    Implement the backward propagation for a single RELU unit.

    Arguments:
    dA -- post-activation gradient, of any shape
    cache -- 'Z' stored during the forward pass, needed to compute the gradient

    Returns:
    dZ -- Gradient of the cost with respect to Z
    """
    Z = cache
    dZ = np.array(dA, copy=True)  # just converting dA to a correct object

    # When z <= 0, set dz to 0 as well
    dZ[Z <= 0] = 0

    assert (dZ.shape == Z.shape)

    return dZ


def sigmoid_backward(dA, cache):
    """
    Implement the backward propagation for a single SIGMOID unit.

    Arguments:
    dA -- post-activation gradient, of any shape
    cache -- 'Z' stored during the forward pass, needed to compute the gradient

    Returns:
    dZ -- Gradient of the cost with respect to Z
    """
    Z = cache

    s = 1 / (1 + np.exp(-Z))
    dZ = dA * s * (1 - s)

    assert (dZ.shape == Z.shape)

    return dZ
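
Sanity-check sketch (not part of the commit): sigmoid_backward should agree with a central finite difference of sigmoid, since d(sigmoid)/dZ = s(1 - s). All names below are local to the example:

Z = np.array([[-1.0, 0.0, 2.5]])
A, cache = sigmoid(Z)

dA = np.ones_like(Z)                           # take the upstream gradient to be 1
dZ = sigmoid_backward(dA, cache)

eps = 1e-6
numeric = (sigmoid(Z + eps)[0] - sigmoid(Z - eps)[0]) / (2 * eps)
print(np.allclose(dZ, numeric))                # expect True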
82+
