diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
index dd1a8a945092..27a5a97c0b6c 100644
--- a/.github/workflows/pre-commit.yml
+++ b/.github/workflows/pre-commit.yml
@@ -14,7 +14,7 @@ jobs:
             ~/.cache/pip
           key: ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - uses: actions/setup-python@v2
-      - uses: psf/black@20.8b1
+      - uses: psf/black@21.4b0
       - name: Install pre-commit
         run: |
           python -m pip install --upgrade pip
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b48da86ee57d..b666e88aa162 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -13,17 +13,17 @@ repos:
           )$
       - id: requirements-txt-fixer
   - repo: https://github.com/psf/black
-    rev: 20.8b1
+    rev: 21.4b0
     hooks:
       - id: black
   - repo: https://github.com/PyCQA/isort
-    rev: 5.7.0
+    rev: 5.8.0
     hooks:
       - id: isort
         args:
           - --profile=black
   - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.9.0
+    rev: 3.9.1
     hooks:
       - id: flake8
         args:
diff --git a/data_structures/binary_tree/binary_search_tree.py b/data_structures/binary_tree/binary_search_tree.py
index 45c3933fe899..a1ed1d0ac2a5 100644
--- a/data_structures/binary_tree/binary_search_tree.py
+++ b/data_structures/binary_tree/binary_search_tree.py
@@ -150,7 +150,7 @@ def inorder(self, arr: list, node: Node):
             self.inorder(arr, node.right)

     def find_kth_smallest(self, k: int, node: Node) -> int:
-        """Return the kth smallest element in a binary search tree """
+        """Return the kth smallest element in a binary search tree"""
         arr = []
         self.inorder(arr, node)  # append all values to list using inorder traversal
         return arr[k - 1]
diff --git a/data_structures/heap/heap.py b/data_structures/heap/heap.py
index 8592362c23b9..65a70e468d1c 100644
--- a/data_structures/heap/heap.py
+++ b/data_structures/heap/heap.py
@@ -32,7 +32,7 @@ def __repr__(self) -> str:
         return str(self.h)

     def parent_index(self, child_idx: int) -> Optional[int]:
-        """ return the parent index of given child """
+        """return the parent index of given child"""
         if child_idx > 0:
             return (child_idx - 1) // 2
         return None
@@ -78,7 +78,7 @@ def max_heapify(self, index: int) -> None:
             self.max_heapify(violation)

     def build_max_heap(self, collection: Iterable[float]) -> None:
-        """ build max heap from an unsorted array"""
+        """build max heap from an unsorted array"""
         self.h = list(collection)
         self.heap_size = len(self.h)
         if self.heap_size > 1:
@@ -87,14 +87,14 @@ def build_max_heap(self, collection: Iterable[float]) -> None:
             self.max_heapify(i)

     def max(self) -> float:
-        """ return the max in the heap """
+        """return the max in the heap"""
         if self.heap_size >= 1:
             return self.h[0]
         else:
             raise Exception("Empty heap")

     def extract_max(self) -> float:
-        """ get and remove max from heap """
+        """get and remove max from heap"""
         if self.heap_size >= 2:
             me = self.h[0]
             self.h[0] = self.h.pop(-1)
@@ -108,7 +108,7 @@ def extract_max(self) -> float:
             raise Exception("Empty heap")

     def insert(self, value: float) -> None:
-        """ insert a new value into the max heap """
+        """insert a new value into the max heap"""
         self.h.append(value)
         idx = (self.heap_size - 1) // 2
         self.heap_size += 1
diff --git a/data_structures/heap/max_heap.py b/data_structures/heap/max_heap.py
index 2a08f8fa2cd1..fbc8eed09226 100644
--- a/data_structures/heap/max_heap.py
+++ b/data_structures/heap/max_heap.py
@@ -21,7 +21,7 @@ def __init__(self):
         self.__size = 0

     def __swap_up(self, i: int) -> None:
-        """ Swap the element up """
+        """Swap the element up"""
         temporary = self.__heap[i]
         while i // 2 > 0:
             if self.__heap[i] > self.__heap[i // 2]:
@@ -30,13 +30,13 @@ def __swap_up(self, i: int) -> None:
             i //= 2

     def insert(self, value: int) -> None:
-        """ Insert new element """
+        """Insert new element"""
         self.__heap.append(value)
         self.__size += 1
         self.__swap_up(self.__size)

     def __swap_down(self, i: int) -> None:
-        """ Swap the element down """
+        """Swap the element down"""
         while self.__size >= 2 * i:
             if 2 * i + 1 > self.__size:
                 bigger_child = 2 * i
@@ -52,7 +52,7 @@ def __swap_down(self, i: int) -> None:
             i = bigger_child

     def pop(self) -> int:
-        """ Pop the root element """
+        """Pop the root element"""
         max_value = self.__heap[1]
         self.__heap[1] = self.__heap[self.__size]
         self.__size -= 1
@@ -65,7 +65,7 @@ def get_list(self):
         return self.__heap[1:]

     def __len__(self):
-        """ Length of the array """
+        """Length of the array"""
         return self.__size


diff --git a/data_structures/linked_list/deque_doubly.py b/data_structures/linked_list/deque_doubly.py
index c9ae8b3d1ba2..2b9d70c223c4 100644
--- a/data_structures/linked_list/deque_doubly.py
+++ b/data_structures/linked_list/deque_doubly.py
@@ -9,7 +9,7 @@


 class _DoublyLinkedBase:
-    """ A Private class (to be inherited) """
+    """A Private class (to be inherited)"""

     class _Node:
         __slots__ = "_prev", "_data", "_next"
diff --git a/data_structures/stacks/stack.py b/data_structures/stacks/stack.py
index 840cde099d38..276684e12184 100644
--- a/data_structures/stacks/stack.py
+++ b/data_structures/stacks/stack.py
@@ -22,28 +22,28 @@ def __str__(self) -> str:
         return str(self.stack)

     def push(self, data):
-        """ Push an element to the top of the stack."""
+        """Push an element to the top of the stack."""
         if len(self.stack) >= self.limit:
             raise StackOverflowError
         self.stack.append(data)

     def pop(self):
-        """ Pop an element off of the top of the stack."""
+        """Pop an element off of the top of the stack."""
         return self.stack.pop()

     def peek(self):
-        """ Peek at the top-most element of the stack."""
+        """Peek at the top-most element of the stack."""
         return self.stack[-1]

     def is_empty(self) -> bool:
-        """ Check if a stack is empty."""
+        """Check if a stack is empty."""
         return not bool(self.stack)

     def is_full(self) -> bool:
         return self.size() == self.limit

     def size(self) -> int:
-        """ Return the size of the stack."""
+        """Return the size of the stack."""
         return len(self.stack)

     def __contains__(self, item) -> bool:
diff --git a/digital_image_processing/sepia.py b/digital_image_processing/sepia.py
index dfb5951676aa..e9dd2c06066d 100644
--- a/digital_image_processing/sepia.py
+++ b/digital_image_processing/sepia.py
@@ -19,7 +19,7 @@ def to_grayscale(blue, green, red):
         return 0.2126 * red + 0.587 * green + 0.114 * blue

     def normalize(value):
-        """ Helper function to normalize R/G/B value -> return 255 if value > 255"""
+        """Helper function to normalize R/G/B value -> return 255 if value > 255"""
         return min(value, 255)

     for i in range(pixel_h):
diff --git a/hashes/md5.py b/hashes/md5.py
index b7888fb610ac..b08ab957340a 100644
--- a/hashes/md5.py
+++ b/hashes/md5.py
@@ -94,7 +94,6 @@ def not32(i):


 def sum32(a, b):
-    """"""
     return (a + b) % 2 ** 32


diff --git a/machine_learning/linear_discriminant_analysis.py b/machine_learning/linear_discriminant_analysis.py
index 0d19e970e973..18553a77ad1c 100644
--- a/machine_learning/linear_discriminant_analysis.py
+++ b/machine_learning/linear_discriminant_analysis.py
@@ -283,7 +283,7 @@ def valid_input(

 # Main Function
 def main():
-    """ This function starts execution phase """
+    """This function starts execution phase"""
     while True:
         print(" Linear Discriminant Analysis ".center(50, "*"))
         print("*" * 50, "\n")
diff --git a/machine_learning/linear_regression.py b/machine_learning/linear_regression.py
index a726629efe00..b0bbc7b904c3 100644
--- a/machine_learning/linear_regression.py
+++ b/machine_learning/linear_regression.py
@@ -88,7 +88,7 @@ def run_linear_regression(data_x, data_y):


 def main():
-    """ Driver function """
+    """Driver function"""
     data = collect_dataset()

     len_data = data.shape[0]
diff --git a/maths/monte_carlo_dice.py b/maths/monte_carlo_dice.py
index e8e3abe83a99..17cedbdbcb18 100644
--- a/maths/monte_carlo_dice.py
+++ b/maths/monte_carlo_dice.py
@@ -7,7 +7,7 @@ class Dice:
     NUM_SIDES = 6

     def __init__(self):
-        """ Initialize a six sided dice """
+        """Initialize a six sided dice"""
         self.sides = list(range(1, Dice.NUM_SIDES + 1))

     def roll(self):
diff --git a/other/least_recently_used.py b/other/least_recently_used.py
index 213339636469..d0e27efc6dc8 100644
--- a/other/least_recently_used.py
+++ b/other/least_recently_used.py
@@ -4,7 +4,7 @@


 class LRUCache:
-    """ Page Replacement Algorithm, Least Recently Used (LRU) Caching."""
+    """Page Replacement Algorithm, Least Recently Used (LRU) Caching."""

     dq_store = object()  # Cache store of keys
     key_reference_map = object()  # References of the keys in cache