
Commit 61eedc1

Remove useless code in doctests (TheAlgorithms#7733)
* refactor: Fix matrix display deprecation
* refactor: Remove useless `print` and `pass` statements
* revert: Replace broken doctests
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* revert: Fix failing doctests
* chore: Satisfy pre-commit

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 501a1cf commit 61eedc1

21 files changed, +51 -61 lines
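
The change applies one pattern throughout: doctest compares the repr() of a bare expression against the expected output, so `>>> print(x)` can usually be shortened to `>>> x` when the object's repr and str agree (as they do for the ints and lists in these files), and placeholder `>>> pass` lines add nothing and are simply deleted. A minimal standalone sketch of the resulting style (hypothetical function, not taken from the repository):

```python
def double_all(values: list[int]) -> list[int]:
    """
    Doctest compares the repr() of a bare expression with the expected
    output, so no explicit print() call is needed here:

    >>> double_all([1, 2, 3])
    [2, 4, 6]
    """
    return [2 * value for value in values]


if __name__ == "__main__":
    import doctest

    doctest.testmod()
```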

backtracking/hamiltonian_cycle.py (+2 -2)

@@ -71,7 +71,7 @@ def util_hamilton_cycle(graph: list[list[int]], path: list[int], curr_ind: int)
     >>> curr_ind = 1
     >>> util_hamilton_cycle(graph, path, curr_ind)
     True
-    >>> print(path)
+    >>> path
     [0, 1, 2, 4, 3, 0]

     Case 2: Use exact graph as in previous case, but in the properties taken from
@@ -85,7 +85,7 @@ def util_hamilton_cycle(graph: list[list[int]], path: list[int], curr_ind: int)
     >>> curr_ind = 3
     >>> util_hamilton_cycle(graph, path, curr_ind)
     True
-    >>> print(path)
+    >>> path
     [0, 1, 2, 4, 3, 0]
     """

computer_vision/flip_augmentation.py (-3)

@@ -22,7 +22,6 @@ def main() -> None:
     Get images list and annotations list from input dir.
     Update new images and annotations.
     Save images and annotations in output dir.
-    >>> pass # A doctest is not possible for this function.
     """
     img_paths, annos = get_dataset(LABEL_DIR, IMAGE_DIR)
     print("Processing...")
@@ -48,7 +47,6 @@ def get_dataset(label_dir: str, img_dir: str) -> tuple[list, list]:
     - label_dir <type: str>: Path to label include annotation of images
     - img_dir <type: str>: Path to folder contain images
     Return <type: list>: List of images path and labels
-    >>> pass # A doctest is not possible for this function.
     """
     img_paths = []
     labels = []
@@ -88,7 +86,6 @@ def update_image_and_anno(
     - new_imgs_list <type: narray>: image after resize
     - new_annos_lists <type: list>: list of new annotation after scale
     - path_list <type: list>: list the name of image file
-    >>> pass # A doctest is not possible for this function.
     """
     new_annos_lists = []
     path_list = []

computer_vision/mosaic_augmentation.py (-3)

@@ -23,7 +23,6 @@ def main() -> None:
     Get images list and annotations list from input dir.
     Update new images and annotations.
     Save images and annotations in output dir.
-    >>> pass # A doctest is not possible for this function.
     """
     img_paths, annos = get_dataset(LABEL_DIR, IMG_DIR)
     for index in range(NUMBER_IMAGES):
@@ -60,7 +59,6 @@ def get_dataset(label_dir: str, img_dir: str) -> tuple[list, list]:
     - label_dir <type: str>: Path to label include annotation of images
     - img_dir <type: str>: Path to folder contain images
     Return <type: list>: List of images path and labels
-    >>> pass # A doctest is not possible for this function.
     """
     img_paths = []
     labels = []
@@ -105,7 +103,6 @@ def update_image_and_anno(
     - output_img <type: narray>: image after resize
     - new_anno <type: list>: list of new annotation after scale
     - path[0] <type: string>: get the name of image file
-    >>> pass # A doctest is not possible for this function.
     """
     output_img = np.zeros([output_size[0], output_size[1], 3], dtype=np.uint8)
     scale_x = scale_range[0] + random.random() * (scale_range[1] - scale_range[0])

data_structures/heap/binomial_heap.py (+2 -2)

@@ -71,7 +71,7 @@ class BinomialHeap:
     ... first_heap.insert(number)

     Size test
-    >>> print(first_heap.size)
+    >>> first_heap.size
     30

     Deleting - delete() test
@@ -97,7 +97,7 @@ class BinomialHeap:
     # # # #

     preOrder() test
-    >>> print(second_heap.preOrder())
+    >>> second_heap.preOrder()
     [(17, 0), ('#', 1), (31, 1), (20, 2), ('#', 3), ('#', 3), (34, 2), ('#', 3), ('#', 3)]

     printing Heap - __str__() test

data_structures/heap/heap.py (+4 -4)

@@ -9,20 +9,20 @@ class Heap:
     >>> unsorted = [103, 9, 1, 7, 11, 15, 25, 201, 209, 107, 5]
     >>> h = Heap()
     >>> h.build_max_heap(unsorted)
-    >>> print(h)
+    >>> h
     [209, 201, 25, 103, 107, 15, 1, 9, 7, 11, 5]
     >>>
     >>> h.extract_max()
     209
-    >>> print(h)
+    >>> h
     [201, 107, 25, 103, 11, 15, 1, 9, 7, 5]
     >>>
     >>> h.insert(100)
-    >>> print(h)
+    >>> h
     [201, 107, 25, 103, 100, 15, 1, 9, 7, 5, 11]
     >>>
     >>> h.heap_sort()
-    >>> print(h)
+    >>> h
     [1, 5, 7, 9, 11, 15, 25, 100, 103, 107, 201]
     """


data_structures/heap/min_heap.py (+1 -1)

@@ -27,7 +27,7 @@ class MinHeap:
     >>> myMinHeap.decrease_key(b, -17)
     >>> print(b)
     Node(B, -17)
-    >>> print(myMinHeap["B"])
+    >>> myMinHeap["B"]
     -17
     """


data_structures/linked_list/skip_list.py (+3)

@@ -443,4 +443,7 @@ def main():


 if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()
     main()
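
For context, `doctest.testmod()` collects and runs every docstring example in the module, so adding it to the `__main__` guard (as above) turns the file's existing examples into executable checks whenever it is run directly. A minimal sketch of the same wiring around a hypothetical function:

```python
def halve(number: float) -> float:
    """
    >>> halve(10)
    5.0
    """
    return number / 2


def main() -> None:
    print(halve(42))


if __name__ == "__main__":
    import doctest

    doctest.testmod()  # silent unless a docstring example fails
    main()
```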

graphs/gale_shapley_bigraph.py (+1 -1)

@@ -17,7 +17,7 @@ def stable_matching(

     >>> donor_pref = [[0, 1, 3, 2], [0, 2, 3, 1], [1, 0, 2, 3], [0, 3, 1, 2]]
     >>> recipient_pref = [[3, 1, 2, 0], [3, 1, 0, 2], [0, 3, 1, 2], [1, 0, 3, 2]]
-    >>> print(stable_matching(donor_pref, recipient_pref))
+    >>> stable_matching(donor_pref, recipient_pref)
     [1, 2, 3, 0]
     """
     assert len(donor_pref) == len(recipient_pref)

graphs/graph_list.py (+3 -3)

@@ -18,15 +18,15 @@ class GraphAdjacencyList(Generic[T]):

     Directed graph example:
     >>> d_graph = GraphAdjacencyList()
-    >>> d_graph
+    >>> print(d_graph)
     {}
     >>> d_graph.add_edge(0, 1)
     {0: [1], 1: []}
     >>> d_graph.add_edge(1, 2).add_edge(1, 4).add_edge(1, 5)
     {0: [1], 1: [2, 4, 5], 2: [], 4: [], 5: []}
     >>> d_graph.add_edge(2, 0).add_edge(2, 6).add_edge(2, 7)
     {0: [1], 1: [2, 4, 5], 2: [0, 6, 7], 4: [], 5: [], 6: [], 7: []}
-    >>> print(d_graph)
+    >>> d_graph
     {0: [1], 1: [2, 4, 5], 2: [0, 6, 7], 4: [], 5: [], 6: [], 7: []}
     >>> print(repr(d_graph))
     {0: [1], 1: [2, 4, 5], 2: [0, 6, 7], 4: [], 5: [], 6: [], 7: []}
@@ -68,7 +68,7 @@ class GraphAdjacencyList(Generic[T]):
     {'a': ['b'], 'b': ['a']}
     >>> char_graph.add_edge('b', 'c').add_edge('b', 'e').add_edge('b', 'f')
     {'a': ['b'], 'b': ['a', 'c', 'e', 'f'], 'c': ['b'], 'e': ['b'], 'f': ['b']}
-    >>> print(char_graph)
+    >>> char_graph
     {'a': ['b'], 'b': ['a', 'c', 'e', 'f'], 'c': ['b'], 'e': ['b'], 'f': ['b']}
     """

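
Note that the swap goes in both directions in this file: the empty-graph example gains a `print()` while the later examples lose theirs. The underlying distinction is that `>>> obj` is checked against `repr(obj)`, whereas `>>> print(obj)` is checked against `str(obj)`, so the right form depends on which method produces the documented output. A standalone sketch of the difference (hypothetical class, not the repository's `GraphAdjacencyList`):

```python
class AdjacencyList:
    """
    >>> graph = AdjacencyList({0: [1], 1: []})
    >>> graph            # doctest compares against repr()
    AdjacencyList({0: [1], 1: []})
    >>> print(graph)     # doctest compares against str()
    {0: [1], 1: []}
    """

    def __init__(self, connections: dict[int, list[int]]) -> None:
        self.connections = connections

    def __repr__(self) -> str:
        return f"AdjacencyList({self.connections})"

    def __str__(self) -> str:
        return str(self.connections)


if __name__ == "__main__":
    import doctest

    doctest.testmod()
```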

graphs/minimum_spanning_tree_prims2.py (+4 -4)

@@ -69,16 +69,16 @@ class MinPriorityQueue(Generic[T]):
     >>> queue.push(3, 4000)
     >>> queue.push(4, 3000)

-    >>> print(queue.extract_min())
+    >>> queue.extract_min()
     2

     >>> queue.update_key(4, 50)

-    >>> print(queue.extract_min())
+    >>> queue.extract_min()
     4
-    >>> print(queue.extract_min())
+    >>> queue.extract_min()
     1
-    >>> print(queue.extract_min())
+    >>> queue.extract_min()
     3
     """


graphs/random_graph_generator.py (+1 -1)

@@ -53,7 +53,7 @@ def complete_graph(vertices_number: int) -> dict:
     @input: vertices_number (number of vertices),
     directed (False if the graph is undirected, True otherwise)
     @example:
-    >>> print(complete_graph(3))
+    >>> complete_graph(3)
     {0: [1, 2], 1: [0, 2], 2: [0, 1]}
     """
     return {

machine_learning/local_weighted_learning/local_weighted_learning.py (-2)

@@ -71,7 +71,6 @@ def local_weight_regression(
 def load_data(dataset_name: str, cola_name: str, colb_name: str) -> np.mat:
     """
     Function used for loading data from the seaborn splitting into x and y points
-    >>> pass # this function has no doctest
     """
     import seaborn as sns

@@ -112,7 +111,6 @@ def plot_preds(
 ) -> plt.plot:
     """
     This function used to plot predictions and display the graph
-    >>> pass #this function has no doctest
     """
     xsort = training_data_x.copy()
     xsort.sort(axis=0)

maths/polynomial_evaluation.py (+1 -1)

@@ -45,7 +45,7 @@ def horner(poly: Sequence[float], x: float) -> float:
     >>> poly = (0.0, 0.0, 5.0, 9.3, 7.0) # f(x) = 7.0x^4 + 9.3x^3 + 5.0x^2
     >>> x = -13.0
     >>> # f(-13) = 7.0(-13)^4 + 9.3(-13)^3 + 5.0(-13)^2 = 180339.9
-    >>> print(evaluate_poly(poly, x))
+    >>> evaluate_poly(poly, x)
     180339.9
     """
     poly = (0.0, 0.0, 5.0, 9.3, 7.0)

maths/radix2_fft.py (+1 -1)

@@ -39,7 +39,7 @@ class FFT:
     >>> x = FFT(A, B)

     Print product
-    >>> print(x.product) # 2x + 3x^2 + 8x^3 + 4x^4 + 6x^5
+    >>> x.product # 2x + 3x^2 + 8x^3 + 4x^4 + 6x^5
     [(-0+0j), (2+0j), (3+0j), (8+0j), (6+0j), (8+0j)]

     __str__ test

matrix/matrix_class.py (+3 -3)

@@ -21,9 +21,9 @@ class Matrix:
     [7. 8. 9.]]

     Matrix rows and columns are available as 2D arrays
-    >>> print(matrix.rows)
+    >>> matrix.rows
     [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
-    >>> print(matrix.columns())
+    >>> matrix.columns()
     [[1, 4, 7], [2, 5, 8], [3, 6, 9]]

     Order is returned as a tuple
@@ -55,7 +55,7 @@ class Matrix:
     [[-3. 6. -3.]
     [6. -12. 6.]
     [-3. 6. -3.]]
-    >>> print(matrix.inverse())
+    >>> matrix.inverse()
     Traceback (most recent call last):
     ...
     TypeError: Only matrices with a non-zero determinant have an inverse
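
The last hunk touches an exception-raising example. When the expression raises, nothing ever reaches `print()`, so dropping the call changes nothing: doctest matches the Traceback block in either form. A standalone sketch of that pattern (hypothetical function, not the repository's `Matrix`):

```python
def reciprocal(value: float) -> float:
    """
    >>> reciprocal(4)
    0.25
    >>> reciprocal(0)
    Traceback (most recent call last):
        ...
    ZeroDivisionError: division by zero
    """
    return 1 / value


if __name__ == "__main__":
    import doctest

    doctest.testmod()
```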

searches/simple_binary_search.py (+10 -10)

@@ -13,25 +13,25 @@
 def binary_search(a_list: list[int], item: int) -> bool:
     """
     >>> test_list = [0, 1, 2, 8, 13, 17, 19, 32, 42]
-    >>> print(binary_search(test_list, 3))
+    >>> binary_search(test_list, 3)
     False
-    >>> print(binary_search(test_list, 13))
+    >>> binary_search(test_list, 13)
     True
-    >>> print(binary_search([4, 4, 5, 6, 7], 4))
+    >>> binary_search([4, 4, 5, 6, 7], 4)
     True
-    >>> print(binary_search([4, 4, 5, 6, 7], -10))
+    >>> binary_search([4, 4, 5, 6, 7], -10)
     False
-    >>> print(binary_search([-18, 2], -18))
+    >>> binary_search([-18, 2], -18)
     True
-    >>> print(binary_search([5], 5))
+    >>> binary_search([5], 5)
     True
-    >>> print(binary_search(['a', 'c', 'd'], 'c'))
+    >>> binary_search(['a', 'c', 'd'], 'c')
     True
-    >>> print(binary_search(['a', 'c', 'd'], 'f'))
+    >>> binary_search(['a', 'c', 'd'], 'f')
     False
-    >>> print(binary_search([], 1))
+    >>> binary_search([], 1)
     False
-    >>> print(binary_search([-.1, .1 , .8], .1))
+    >>> binary_search([-.1, .1 , .8], .1)
     True
     >>> binary_search(range(-5000, 5000, 10), 80)
     True

sorts/bitonic_sort.py (+6 -6)

@@ -16,19 +16,19 @@ def comp_and_swap(array: list[int], index1: int, index2: int, direction: int) ->

     >>> arr = [12, 42, -21, 1]
     >>> comp_and_swap(arr, 1, 2, 1)
-    >>> print(arr)
+    >>> arr
     [12, -21, 42, 1]

     >>> comp_and_swap(arr, 1, 2, 0)
-    >>> print(arr)
+    >>> arr
     [12, 42, -21, 1]

     >>> comp_and_swap(arr, 0, 3, 1)
-    >>> print(arr)
+    >>> arr
     [1, 42, -21, 12]

     >>> comp_and_swap(arr, 0, 3, 0)
-    >>> print(arr)
+    >>> arr
     [12, 42, -21, 1]
     """
     if (direction == 1 and array[index1] > array[index2]) or (
@@ -46,11 +46,11 @@ def bitonic_merge(array: list[int], low: int, length: int, direction: int) -> No

     >>> arr = [12, 42, -21, 1]
     >>> bitonic_merge(arr, 0, 4, 1)
-    >>> print(arr)
+    >>> arr
     [-21, 1, 12, 42]

     >>> bitonic_merge(arr, 0, 4, 0)
-    >>> print(arr)
+    >>> arr
     [42, 12, 1, -21]
     """
     if length > 1:

sorts/normal_distribution_quick_sort.md (+2 -2)

@@ -17,8 +17,8 @@ The array elements are taken from a Standard Normal Distribution, having mean =
 >>> mu, sigma = 0, 1 # mean and standard deviation
 >>> X = np.random.normal(mu, sigma, p)
 >>> np.save(outfile, X)
->>> print('The array is')
->>> print(X)
+>>> 'The array is'
+>>> X

 ```


sorts/recursive_insertion_sort.py (+6 -6)

@@ -14,17 +14,17 @@ def rec_insertion_sort(collection: list, n: int):

     >>> col = [1, 2, 1]
     >>> rec_insertion_sort(col, len(col))
-    >>> print(col)
+    >>> col
     [1, 1, 2]

     >>> col = [2, 1, 0, -1, -2]
     >>> rec_insertion_sort(col, len(col))
-    >>> print(col)
+    >>> col
     [-2, -1, 0, 1, 2]

     >>> col = [1]
     >>> rec_insertion_sort(col, len(col))
-    >>> print(col)
+    >>> col
     [1]
     """
     # Checks if the entire collection has been sorted
@@ -41,17 +41,17 @@ def insert_next(collection: list, index: int):

     >>> col = [3, 2, 4, 2]
     >>> insert_next(col, 1)
-    >>> print(col)
+    >>> col
     [2, 3, 4, 2]

     >>> col = [3, 2, 3]
     >>> insert_next(col, 2)
-    >>> print(col)
+    >>> col
     [3, 2, 3]

     >>> col = []
     >>> insert_next(col, 1)
-    >>> print(col)
+    >>> col
     []
     """
     # Checks order between adjacent elements

web_programming/reddit.py (-2)

@@ -23,8 +23,6 @@ def get_subreddit_data(
     limit : Number of posts to fetch
     age : ["new", "top", "hot"]
     wanted_data : Get only the required data in the list
-
-    >>> pass
     """
     wanted_data = wanted_data or []
     if invalid_search_terms := ", ".join(sorted(set(wanted_data) - valid_terms)):

0 commit comments
