
Commit 7f2e471

Merge pull request #1 from cyanaspect/master: swapped out pyplot for plt for all exercises

2 parents 00736a5 + 99117dc

File tree

8 files changed: +114 -115 lines changed

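For reference, the renamed import is purely an aliasing change: `from matplotlib import pyplot as plt` and the more common `import matplotlib.pyplot as plt` bind the same module to the same name, so every `pyplot.*` call in the notebooks becomes `plt.*` with no behavioral difference. A minimal sketch (the sine data is just for illustration):

```python
# Both statements bind matplotlib's pyplot module to the name `plt`;
# the second is the form most matplotlib documentation uses.
from matplotlib import pyplot as plt   # form used in these notebooks
# import matplotlib.pyplot as plt     # equivalent, more common spelling

import numpy as np

x = np.linspace(0, 2 * np.pi, 100)
plt.plot(x, np.sin(x))  # identical call under either import
plt.show()
```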

Exercise1/exercise1.ipynb (+26 -27)
@@ -30,7 +30,7 @@
 "import numpy as np\n",
 "\n",
 "# Plotting library\n",
-"from matplotlib import pyplot\n",
+"from matplotlib import pyplot as plt\n",
 "from mpl_toolkits.mplot3d import Axes3D # needed to plot 3-D surfaces\n",
 "\n",
 "# library written for this exercise providing additional functions for assignment submission, and others\n",
@@ -220,17 +220,16 @@
 "\n",
 "In this course, we will be exclusively using `matplotlib` to do all our plotting. `matplotlib` is one of the most popular scientific plotting libraries in python and has extensive tools and functions to make beautiful plots. `pyplot` is a module within `matplotlib` which provides a simplified interface to `matplotlib`'s most common plotting tasks, mimicking MATLAB's plotting interface.\n",
 "\n",
-"<div class=\"alert alert-block alert-warning\">\n",
-"You might have noticed that we have imported the `pyplot` module at the beginning of this exercise using the command `from matplotlib import pyplot`. This is rather uncommon, and if you look at python code elsewhere or in the `matplotlib` tutorials, you will see that the module is named `plt`. This is used by module renaming by using the import command `import matplotlib.pyplot as plt`. We will not using the short name of `pyplot` module in this class exercises, but you should be aware of this deviation from norm.\n",
-"</div>\n",
+"\n",
+"Also, we have imported the `pyplot` module at the beginning of this exercise using the command `from matplotlib import pyplot as plt`. \"plt\" is a commonly used abbreviation of the pyplot module.\n",
 "\n",
 "\n",
 "In the following part, your first job is to complete the `plotData` function below. Modify the function and fill in the following code:\n",
 "\n",
 "```python\n",
-" pyplot.plot(x, y, 'ro', ms=10, mec='k')\n",
-" pyplot.ylabel('Profit in $10,000')\n",
-" pyplot.xlabel('Population of City in 10,000s')\n",
+" plt.plot(x, y, 'ro', ms=10, mec='k')\n",
+" plt.ylabel('Profit in $10,000')\n",
+" plt.xlabel('Population of City in 10,000s')\n",
 "```"
 ]
},
@@ -267,7 +266,7 @@
 " using plot(..., 'ro', ms=10), where `ms` refers to marker size. You \n",
 " can also set the marker edge color using the `mec` property.\n",
 " \"\"\"\n",
-" fig = pyplot.figure() # open a new figure\n",
+" fig = plt.figure() # open a new figure\n",
 " \n",
 " # ====================== YOUR CODE HERE ======================= \n",
 " \n",
@@ -299,7 +298,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"To quickly learn more about the `matplotlib` plot function and what arguments you can provide to it, you can type `?pyplot.plot` in a cell within the jupyter notebook. This opens a separate page showing the documentation for the requested function. You can also search online for plotting documentation. \n",
+"To quickly learn more about the `matplotlib` plot function and what arguments you can provide to it, you can type `?plt.plot` in a cell within the jupyter notebook. This opens a separate page showing the documentation for the requested function. You can also search online for plotting documentation. \n",
 "\n",
 "To set the markers to red circles, we used the option `'or'` within the `plot` function."
 ]
@@ -310,7 +309,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"?pyplot.plot"
+"?plt.plot"
 ]
},
{
@@ -599,8 +598,8 @@
 "source": [
 "# plot the linear fit\n",
 "plotData(X[:, 1], y)\n",
-"pyplot.plot(X[:, 1], np.dot(X, theta), '-')\n",
-"pyplot.legend(['Training data', 'Linear regression']);"
+"plt.plot(X[:, 1], np.dot(X, theta), '-')\n",
+"plt.legend(['Training data', 'Linear regression']);"
 ]
},
{
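The cell in this hunk overlays the fitted line on the scatter produced by `plotData`; because both draw into the current axes, a single legend covers them in plot order. A self-contained sketch with synthetic stand-ins for the notebook's `X`, `y`, and `theta` (all assumptions, for illustration only):

```python
import numpy as np
import matplotlib.pyplot as plt

# Synthetic stand-ins for the notebook's data and fitted parameters
rng = np.random.default_rng(0)
pop = rng.uniform(5, 22, 50)                 # populations in 10,000s
y = 1.2 * pop - 4 + rng.normal(0, 2, 50)     # noisy profits
X = np.column_stack([np.ones(50), pop])      # intercept column + feature
theta = np.array([-4.0, 1.2])                # pretend fitted parameters

plt.plot(X[:, 1], y, 'ro', ms=10, mec='k')   # training data
plt.plot(X[:, 1], np.dot(X, theta), '-')     # fitted line
plt.legend(['Training data', 'Linear regression'])
plt.show()
```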
@@ -687,21 +686,21 @@
 "J_vals = J_vals.T\n",
 "\n",
 "# surface plot\n",
-"fig = pyplot.figure(figsize=(12, 5))\n",
+"fig = plt.figure(figsize=(12, 5))\n",
 "ax = fig.add_subplot(121, projection='3d')\n",
 "ax.plot_surface(theta0_vals, theta1_vals, J_vals, cmap='viridis')\n",
-"pyplot.xlabel('theta0')\n",
-"pyplot.ylabel('theta1')\n",
-"pyplot.title('Surface')\n",
+"plt.xlabel('theta0')\n",
+"plt.ylabel('theta1')\n",
+"plt.title('Surface')\n",
 "\n",
 "# contour plot\n",
 "# Plot J_vals as 15 contours spaced logarithmically between 0.01 and 100\n",
-"ax = pyplot.subplot(122)\n",
-"pyplot.contour(theta0_vals, theta1_vals, J_vals, linewidths=2, cmap='viridis', levels=np.logspace(-2, 3, 20))\n",
-"pyplot.xlabel('theta0')\n",
-"pyplot.ylabel('theta1')\n",
-"pyplot.plot(theta[0], theta[1], 'ro', ms=10, lw=2)\n",
-"pyplot.title('Contour, showing minimum')\n",
+"ax = plt.subplot(122)\n",
+"plt.contour(theta0_vals, theta1_vals, J_vals, linewidths=2, cmap='viridis', levels=np.logspace(-2, 3, 20))\n",
+"plt.xlabel('theta0')\n",
+"plt.ylabel('theta1')\n",
+"plt.plot(theta[0], theta[1], 'ro', ms=10, lw=2)\n",
+"plt.title('Contour, showing minimum')\n",
 "pass"
 ]
},
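The renamed cell draws a 3-D surface and a log-spaced contour side by side. A runnable sketch of the same figure layout, with a synthetic quadratic bowl standing in for the notebook's `J_vals` (the grid and the marked minimum are assumptions of this sketch):

```python
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the 3-D projection

# Synthetic cost surface over a theta grid (stand-in for J_vals)
theta0_vals = np.linspace(-10, 10, 100)
theta1_vals = np.linspace(-1, 4, 100)
T0, T1 = np.meshgrid(theta0_vals, theta1_vals)
J_vals = (T0 + 4) ** 2 + 10 * (T1 - 1.2) ** 2 + 0.5

fig = plt.figure(figsize=(12, 5))

# left panel: surface plot
ax = fig.add_subplot(121, projection='3d')
ax.plot_surface(T0, T1, J_vals, cmap='viridis')
ax.set_xlabel('theta0')
ax.set_ylabel('theta1')
ax.set_title('Surface')

# right panel: contours spaced logarithmically, minimum marked
ax = plt.subplot(122)
plt.contour(T0, T1, J_vals, linewidths=2, cmap='viridis',
            levels=np.logspace(-2, 3, 20))
plt.xlabel('theta0')
plt.ylabel('theta1')
plt.plot(-4, 1.2, 'ro', ms=10)  # minimum of the synthetic bowl
plt.title('Contour, showing minimum')
plt.show()
```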
@@ -1082,7 +1081,7 @@
 "</div>\n",
 "\n",
 "<div class=\"alert alert-block alert-warning\">\n",
-"**MATPLOTLIB tip:** To compare how different learning learning rates affect convergence, it is helpful to plot $J$ for several learning rates on the same figure. This can be done by making `alpha` a python list, and looping across the values within this list, and calling the plot function in every iteration of the loop. It is also useful to have a legend to distinguish the different lines within the plot. Search online for `pyplot.legend` for help on showing legends in `matplotlib`.\n",
+"**MATPLOTLIB tip:** To compare how different learning learning rates affect convergence, it is helpful to plot $J$ for several learning rates on the same figure. This can be done by making `alpha` a python list, and looping across the values within this list, and calling the plot function in every iteration of the loop. It is also useful to have a legend to distinguish the different lines within the plot. Search online for `plt.legend` for help on showing legends in `matplotlib`.\n",
 "</div>\n",
 "\n",
 "Notice the changes in the convergence curves as the learning rate changes. With a small learning rate, you should find that gradient descent takes a very long time to converge to the optimal value. Conversely, with a large learning rate, gradient descent might not converge or might even diverge!\n",
@@ -1127,9 +1126,9 @@
 "theta, J_history = gradientDescentMulti(X, y, theta, alpha, num_iters)\n",
 "\n",
 "# Plot the convergence graph\n",
-"pyplot.plot(np.arange(len(J_history)), J_history, lw=2)\n",
-"pyplot.xlabel('Number of iterations')\n",
-"pyplot.ylabel('Cost J')\n",
+"plt.plot(np.arange(len(J_history)), J_history, lw=2)\n",
+"plt.xlabel('Number of iterations')\n",
+"plt.ylabel('Cost J')\n",
 "\n",
 "# Display the gradient descent's result\n",
 "print('theta computed from gradient descent: {:s}'.format(str(theta)))\n",
@@ -1299,7 +1298,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.6.6"
+"version": "3.9.1"
 }
},
"nbformat": 4,

Exercise2/exercise2.ipynb (+16 -16)
@@ -30,7 +30,7 @@
 "import numpy as np\n",
 "\n",
 "# Plotting library\n",
-"from matplotlib import pyplot\n",
+"from matplotlib import pyplot as plt\n",
 "\n",
 "# Optimization module in scipy\n",
 "from scipy import optimize\n",
@@ -118,8 +118,8 @@
 "neg = y == 0\n",
 "\n",
 "# Plot Examples\n",
-"pyplot.plot(X[pos, 0], X[pos, 1], 'k*', lw=2, ms=10)\n",
-"pyplot.plot(X[neg, 0], X[neg, 1], 'ko', mfc='y', ms=8, mec='k', mew=1)\n",
+"plt.plot(X[pos, 0], X[pos, 1], 'k*', lw=2, ms=10)\n",
+"plt.plot(X[neg, 0], X[neg, 1], 'ko', mfc='y', ms=8, mec='k', mew=1)\n",
 "```"
 ]
},
@@ -148,7 +148,7 @@
 " option 'k*' for the positive examples and 'ko' for the negative examples. \n",
 " \"\"\"\n",
 " # Create New Figure\n",
-" fig = pyplot.figure()\n",
+" fig = plt.figure()\n",
 "\n",
 " # ====================== YOUR CODE HERE ======================\n",
 "\n",
@@ -171,9 +171,9 @@
 "source": [
 "plotData(X, y)\n",
 "# add axes labels\n",
-"pyplot.xlabel('Exam 1 score')\n",
-"pyplot.ylabel('Exam 2 score')\n",
-"pyplot.legend(['Admitted', 'Not admitted'])\n",
+"plt.xlabel('Exam 1 score')\n",
+"plt.ylabel('Exam 2 score')\n",
+"plt.legend(['Admitted', 'Not admitted'])\n",
 "pass"
 ]
},
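For Exercise 2 the same pattern applies: `plotData` scatters admitted vs. not-admitted points, and labels and the legend are added afterwards. A sketch of one possible completion, with a tiny synthetic data set (an assumption, purely for illustration):

```python
import numpy as np
import matplotlib.pyplot as plt

def plotData(X, y):
    """Positive examples as black stars, negatives as yellow circles."""
    fig = plt.figure()  # Create New Figure

    pos = y == 1
    neg = y == 0
    plt.plot(X[pos, 0], X[pos, 1], 'k*', lw=2, ms=10)
    plt.plot(X[neg, 0], X[neg, 1], 'ko', mfc='y', ms=8, mec='k', mew=1)

# Synthetic exam scores (illustration only)
rng = np.random.default_rng(1)
X = rng.uniform(30, 100, size=(40, 2))
y = (X.sum(axis=1) > 130).astype(int)

plotData(X, y)
plt.xlabel('Exam 1 score')
plt.ylabel('Exam 2 score')
plt.legend(['Admitted', 'Not admitted'])
plt.show()
```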
@@ -660,11 +660,11 @@
 "source": [
 "plotData(X, y)\n",
 "# Labels and Legend\n",
-"pyplot.xlabel('Microchip Test 1')\n",
-"pyplot.ylabel('Microchip Test 2')\n",
+"plt.xlabel('Microchip Test 1')\n",
+"plt.ylabel('Microchip Test 2')\n",
 "\n",
 "# Specified in plot order\n",
-"pyplot.legend(['y = 1', 'y = 0'], loc='upper right')\n",
+"plt.legend(['y = 1', 'y = 0'], loc='upper right')\n",
 "pass"
 ]
},
@@ -920,11 +920,11 @@
 "theta = res.x\n",
 "\n",
 "utils.plotDecisionBoundary(plotData, theta, X, y)\n",
-"pyplot.xlabel('Microchip Test 1')\n",
-"pyplot.ylabel('Microchip Test 2')\n",
-"pyplot.legend(['y = 1', 'y = 0'])\n",
-"pyplot.grid(False)\n",
-"pyplot.title('lambda = %0.2f' % lambda_)\n",
+"plt.xlabel('Microchip Test 1')\n",
+"plt.ylabel('Microchip Test 2')\n",
+"plt.legend(['y = 1', 'y = 0'])\n",
+"plt.grid(False)\n",
+"plt.title('lambda = %0.2f' % lambda_)\n",
 "\n",
 "# Compute accuracy on our training set\n",
 "p = predict(theta, X)\n",
@@ -957,7 +957,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.6.4"
+"version": "3.9.1"
 }
},
"nbformat": 4,

Exercise3/exercise3.ipynb (+2 -2)
@@ -30,7 +30,7 @@
 "import numpy as np\n",
 "\n",
 "# Plotting library\n",
-"from matplotlib import pyplot\n",
+"from matplotlib import pyplot as plt\n",
 "\n",
 "# Optimization module in scipy\n",
 "from scipy import optimize\n",
@@ -915,7 +915,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.6.6"
+"version": "3.9.1"
 }
},
"nbformat": 4,

Exercise4/exercise4.ipynb (+2 -2)
@@ -29,7 +29,7 @@
 "import numpy as np\n",
 "\n",
 "# Plotting library\n",
-"from matplotlib import pyplot\n",
+"from matplotlib import pyplot as plt\n",
 "\n",
 "# Optimization module in scipy\n",
 "from scipy import optimize\n",
@@ -932,7 +932,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.6.6"
+"version": "3.9.1"
 }
},
"nbformat": 4,

Exercise5/exercise5.ipynb (+31 -31)
@@ -29,7 +29,7 @@
 "import numpy as np\n",
 "\n",
 "# Plotting library\n",
-"from matplotlib import pyplot\n",
+"from matplotlib import pyplot as plt\n",
 "\n",
 "# Optimization module in scipy\n",
 "from scipy import optimize\n",
@@ -114,9 +114,9 @@
 "m = y.size\n",
 "\n",
 "# Plot training data\n",
-"pyplot.plot(X, y, 'ro', ms=10, mec='k', mew=1)\n",
-"pyplot.xlabel('Change in water level (x)')\n",
-"pyplot.ylabel('Water flowing out of the dam (y)');"
+"plt.plot(X, y, 'ro', ms=10, mec='k', mew=1)\n",
+"plt.xlabel('Change in water level (x)')\n",
+"plt.ylabel('Water flowing out of the dam (y)');"
 ]
},
{
@@ -317,10 +317,10 @@
 "theta = utils.trainLinearReg(linearRegCostFunction, X_aug, y, lambda_=0)\n",
 "\n",
 "# Plot fit over the data\n",
-"pyplot.plot(X, y, 'ro', ms=10, mec='k', mew=1.5)\n",
-"pyplot.xlabel('Change in water level (x)')\n",
-"pyplot.ylabel('Water flowing out of the dam (y)')\n",
-"pyplot.plot(X, np.dot(X_aug, theta), '--', lw=2);"
+"plt.plot(X, y, 'ro', ms=10, mec='k', mew=1.5)\n",
+"plt.xlabel('Change in water level (x)')\n",
+"plt.ylabel('Water flowing out of the dam (y)')\n",
+"plt.plot(X, np.dot(X_aug, theta), '--', lw=2);"
 ]
},
{
@@ -464,12 +464,12 @@
 "Xval_aug = np.concatenate([np.ones((yval.size, 1)), Xval], axis=1)\n",
 "error_train, error_val = learningCurve(X_aug, y, Xval_aug, yval, lambda_=0)\n",
 "\n",
-"pyplot.plot(np.arange(1, m+1), error_train, np.arange(1, m+1), error_val, lw=2)\n",
-"pyplot.title('Learning curve for linear regression')\n",
-"pyplot.legend(['Train', 'Cross Validation'])\n",
-"pyplot.xlabel('Number of training examples')\n",
-"pyplot.ylabel('Error')\n",
-"pyplot.axis([0, 13, 0, 150])\n",
+"plt.plot(np.arange(1, m+1), error_train, np.arange(1, m+1), error_val, lw=2)\n",
+"plt.title('Learning curve for linear regression')\n",
+"plt.legend(['Train', 'Cross Validation'])\n",
+"plt.xlabel('Number of training examples')\n",
+"plt.ylabel('Error')\n",
+"plt.axis([0, 13, 0, 150])\n",
 "\n",
 "print('# Training Examples\\tTrain Error\\tCross Validation Error')\n",
 "for i in range(m):\n",
@@ -654,24 +654,24 @@
 " lambda_=lambda_, maxiter=55)\n",
 "\n",
 "# Plot training data and fit\n",
-"pyplot.plot(X, y, 'ro', ms=10, mew=1.5, mec='k')\n",
+"plt.plot(X, y, 'ro', ms=10, mew=1.5, mec='k')\n",
 "\n",
 "utils.plotFit(polyFeatures, np.min(X), np.max(X), mu, sigma, theta, p)\n",
 "\n",
-"pyplot.xlabel('Change in water level (x)')\n",
-"pyplot.ylabel('Water flowing out of the dam (y)')\n",
-"pyplot.title('Polynomial Regression Fit (lambda = %f)' % lambda_)\n",
-"pyplot.ylim([-20, 50])\n",
+"plt.xlabel('Change in water level (x)')\n",
+"plt.ylabel('Water flowing out of the dam (y)')\n",
+"plt.title('Polynomial Regression Fit (lambda = %f)' % lambda_)\n",
+"plt.ylim([-20, 50])\n",
 "\n",
-"pyplot.figure()\n",
+"plt.figure()\n",
 "error_train, error_val = learningCurve(X_poly, y, X_poly_val, yval, lambda_)\n",
-"pyplot.plot(np.arange(1, 1+m), error_train, np.arange(1, 1+m), error_val)\n",
+"plt.plot(np.arange(1, 1+m), error_train, np.arange(1, 1+m), error_val)\n",
 "\n",
-"pyplot.title('Polynomial Regression Learning Curve (lambda = %f)' % lambda_)\n",
-"pyplot.xlabel('Number of training examples')\n",
-"pyplot.ylabel('Error')\n",
-"pyplot.axis([0, 13, 0, 100])\n",
-"pyplot.legend(['Train', 'Cross Validation'])\n",
+"plt.title('Polynomial Regression Learning Curve (lambda = %f)' % lambda_)\n",
+"plt.xlabel('Number of training examples')\n",
+"plt.ylabel('Error')\n",
+"plt.axis([0, 13, 0, 100])\n",
+"plt.legend(['Train', 'Cross Validation'])\n",
 "\n",
 "print('Polynomial Regression (lambda = %f)\\n' % lambda_)\n",
 "print('# Training Examples\\tTrain Error\\tCross Validation Error')\n",
821821
"source": [
822822
"lambda_vec, error_train, error_val = validationCurve(X_poly, y, X_poly_val, yval)\n",
823823
"\n",
824-
"pyplot.plot(lambda_vec, error_train, '-o', lambda_vec, error_val, '-o', lw=2)\n",
825-
"pyplot.legend(['Train', 'Cross Validation'])\n",
826-
"pyplot.xlabel('lambda')\n",
827-
"pyplot.ylabel('Error')\n",
824+
"plt.plot(lambda_vec, error_train, '-o', lambda_vec, error_val, '-o', lw=2)\n",
825+
"plt.legend(['Train', 'Cross Validation'])\n",
826+
"plt.xlabel('lambda')\n",
827+
"plt.ylabel('Error')\n",
828828
"\n",
829829
"print('lambda\\t\\tTrain Error\\tValidation Error')\n",
830830
"for i in range(len(lambda_vec)):\n",
@@ -907,7 +907,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.6.4"
+"version": "3.9.1"
 }
},
"nbformat": 4,

Exercise6/exercise6.ipynb (+2 -2)
@@ -32,7 +32,7 @@
 "import re\n",
 "\n",
 "# Plotting library\n",
-"from matplotlib import pyplot\n",
+"from matplotlib import pyplot as plt\n",
 "\n",
 "# Optimization module in scipy\n",
 "from scipy import optimize\n",
@@ -1021,7 +1021,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.6.4"
+"version": "3.9.1"
 }
},
"nbformat": 4,
