Only one carriage return (#2155)

* updating DIRECTORY.md

* touch

* fixup! Format Python code with psf/black push

* Update word_frequency_functions.py

* updating DIRECTORY.md

* Update word_frequency_functions.py

* Update lfu_cache.py

* Update sol1.py

Co-authored-by: github-actions <${GITHUB_ACTOR}@users.noreply.github.com>
Christian Clauss 2020-06-25 19:15:30 +02:00 committed by GitHub
parent d2fa91b18e
commit 8ab84fd794
7 changed files with 77 additions and 68 deletions


@@ -309,6 +309,7 @@
* [Scoring Functions](https://github.com/TheAlgorithms/Python/blob/master/machine_learning/scoring_functions.py)
* [Sequential Minimum Optimization](https://github.com/TheAlgorithms/Python/blob/master/machine_learning/sequential_minimum_optimization.py)
* [Support Vector Machines](https://github.com/TheAlgorithms/Python/blob/master/machine_learning/support_vector_machines.py)
* [Word Frequency Functions](https://github.com/TheAlgorithms/Python/blob/master/machine_learning/word_frequency_functions.py)
## Maths
* [3N Plus 1](https://github.com/TheAlgorithms/Python/blob/master/maths/3n_plus_1.py)
@@ -442,7 +443,9 @@
* [Integeration By Simpson Approx](https://github.com/TheAlgorithms/Python/blob/master/other/integeration_by_simpson_approx.py)
* [Largest Subarray Sum](https://github.com/TheAlgorithms/Python/blob/master/other/largest_subarray_sum.py)
* [Least Recently Used](https://github.com/TheAlgorithms/Python/blob/master/other/least_recently_used.py)
* [Lfu Cache](https://github.com/TheAlgorithms/Python/blob/master/other/lfu_cache.py)
* [Linear Congruential Generator](https://github.com/TheAlgorithms/Python/blob/master/other/linear_congruential_generator.py)
* [Lru Cache](https://github.com/TheAlgorithms/Python/blob/master/other/lru_cache.py)
* [Magicdiamondpattern](https://github.com/TheAlgorithms/Python/blob/master/other/magicdiamondpattern.py)
* [Markov Chain](https://github.com/TheAlgorithms/Python/blob/master/other/markov_chain.py)
* [Nested Brackets](https://github.com/TheAlgorithms/Python/blob/master/other/nested_brackets.py)
@@ -566,6 +569,8 @@
* [Sol1](https://github.com/TheAlgorithms/Python/blob/master/project_euler/problem_40/sol1.py)
* Problem 42
* [Solution42](https://github.com/TheAlgorithms/Python/blob/master/project_euler/problem_42/solution42.py)
* Problem 47
* [Sol1](https://github.com/TheAlgorithms/Python/blob/master/project_euler/problem_47/sol1.py)
* Problem 48
* [Sol1](https://github.com/TheAlgorithms/Python/blob/master/project_euler/problem_48/sol1.py)
* Problem 52


@@ -204,14 +204,16 @@ def del_node(root, data):
if root is None:
return root
if get_height(root.get_right()) - get_height(root.get_left()) == 2:
if get_height(root.get_right().get_right()) > \
get_height(root.get_right().get_left()):
if get_height(root.get_right().get_right()) > get_height(
root.get_right().get_left()
):
root = left_rotation(root)
else:
root = rl_rotation(root)
elif get_height(root.get_right()) - get_height(root.get_left()) == -2:
if get_height(root.get_left().get_left()) > \
get_height(root.get_left().get_right()):
if get_height(root.get_left().get_left()) > get_height(
root.get_left().get_right()
):
root = right_rotation(root)
else:
root = lr_rotation(root)
@@ -253,6 +255,7 @@ class AVLtree:
2 *
*************************************
"""
def __init__(self):
self.root = None
@@ -307,6 +310,7 @@ class AVLtree:
def _test():
import doctest
doctest.testmod()

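The condition reformatted above is the post-deletion rebalancing check: it compares subtree heights and picks a single or double rotation. A minimal sketch of that decision, wrapped in a hypothetical rebalance_after_delete helper, with get_height and the rotation functions standing in for the ones already defined in the AVL tree module:

# Sketch of the rebalancing decision shown in the hunk above; get_height,
# left_rotation, right_rotation, rl_rotation and lr_rotation are the helpers
# defined in the AVL tree module, and rebalance_after_delete is a made-up name.
def rebalance_after_delete(root):
    balance = get_height(root.get_right()) - get_height(root.get_left())
    if balance == 2:  # right-heavy
        # single left rotation if the right child is itself right-heavy, else right-left
        if get_height(root.get_right().get_right()) > get_height(
            root.get_right().get_left()
        ):
            return left_rotation(root)
        return rl_rotation(root)
    if balance == -2:  # left-heavy
        # single right rotation if the left child is itself left-heavy, else left-right
        if get_height(root.get_left().get_left()) > get_height(
            root.get_left().get_right()
        ):
            return right_rotation(root)
        return lr_rotation(root)
    return root  # already balanced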

@@ -40,7 +40,7 @@ from math import log10
"""
def term_frequency(term : str, document : str) -> int:
def term_frequency(term: str, document: str) -> int:
"""
Return the number of times a term occurs within
a given document.
@@ -58,9 +58,7 @@ def term_frequency(term : str, document : str) -> int:
str.maketrans("", "", string.punctuation)
).replace("\n", "")
tokenize_document = document_without_punctuation.split(" ") # word tokenization
return len(
[word for word in tokenize_document if word.lower() == term.lower()]
)
return len([word for word in tokenize_document if word.lower() == term.lower()])
def document_frequency(term: str, corpus: str) -> int:
@@ -77,17 +75,18 @@ is the second document in the corpus.\\nTHIS is \
the third document in the corpus.")
(1, 3)
"""
corpus_without_punctuation = corpus.translate(
corpus_without_punctuation = corpus.lower().translate(
str.maketrans("", "", string.punctuation)
) # strip all punctuation and replace it with ''
documents = corpus_without_punctuation.split("\n")
lowercase_documents = [document.lower() for document in documents]
return len(
[document for document in lowercase_documents if term.lower() in document]
), len(documents)
docs = corpus_without_punctuation.split("\n")
term = term.lower()
return (
len([doc for doc in docs if term in doc]),
len(docs),
)
def inverse_document_frequency(df : int, N: int) -> float:
def inverse_document_frequency(df: int, N: int) -> float:
"""
Return an integer denoting the importance
of a word. This measure of importance is
@@ -116,7 +115,7 @@ def inverse_document_frequency(df : int, N: int) -> float:
return round(log10(N / df), 3)
def tf_idf(tf : int, idf: int) -> float:
def tf_idf(tf: int, idf: int) -> float:
"""
Combine the term frequency
and inverse document frequency functions to

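Taken together, the four functions touched above compose into a tf-idf score. A minimal usage sketch, assuming the module is importable as word_frequency_functions (the import path is illustrative; in the repository each file is normally exercised through its doctests):

# Illustrative composition of the functions changed above; the import path
# is an assumption made for this sketch.
from word_frequency_functions import (
    term_frequency,
    document_frequency,
    inverse_document_frequency,
    tf_idf,
)

corpus = (
    "This is the first document in the corpus.\n"
    "ThIs is the second document in the corpus.\n"
    "THIS is the third document in the corpus."
)
document = "This is the first document in the corpus."

tf = term_frequency("first", document)            # occurrences of the term in one document
df, n_docs = document_frequency("first", corpus)  # (documents containing the term, total documents)
idf = inverse_document_frequency(df, n_docs)      # round(log10(N / df), 3)
print(tf_idf(tf, idf))                            # term frequency weighted by the term's rarity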

@@ -2,9 +2,9 @@ from typing import Callable, Optional
class DoubleLinkedListNode:
'''
"""
Double Linked List Node built specifically for LFU Cache
'''
"""
def __init__(self, key: int, val: int):
self.key = key
@@ -15,9 +15,9 @@ class DoubleLinkedListNode:
class DoubleLinkedList:
'''
"""
Double Linked List built specifically for LFU Cache
'''
"""
def __init__(self):
self.head = DoubleLinkedListNode(None, None)
@@ -25,9 +25,9 @@ class DoubleLinkedList:
self.head.next, self.rear.prev = self.rear, self.head
def add(self, node: DoubleLinkedListNode) -> None:
'''
"""
Adds the given node at the head of the list and shifting it to proper position
'''
"""
temp = self.rear.prev
@@ -43,9 +43,9 @@ class DoubleLinkedList:
node1.next, node2.prev = node2, node1
def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode:
'''
"""
Removes and returns the given node from the list
'''
"""
temp_last, temp_next = node.prev, node.next
node.prev, node.next = None, None
@ -54,7 +54,7 @@ class DoubleLinkedList:
class LFUCache:
'''
"""
LFU Cache to store a given capacity of data. Can be used as a stand-alone object
or as a function decorator.
@@ -72,7 +72,7 @@ class LFUCache:
>>> cache.get(4)
4
>>> cache
CacheInfo(hits=3, misses=2, capacity=2, current size=2)
CacheInfo(hits=3, misses=2, capacity=2, current_size=2)
>>> @LFUCache.decorator(100)
... def fib(num):
... if num in (1, 2):
@@ -83,8 +83,8 @@ class LFUCache:
... res = fib(i)
>>> fib.cache_info()
CacheInfo(hits=196, misses=100, capacity=100, current size=100)
'''
CacheInfo(hits=196, misses=100, capacity=100, current_size=100)
"""
# class variable to map the decorator functions to their respective instance
decorator_function_to_instance_map = {}
@@ -98,30 +98,32 @@ class LFUCache:
self.cache = {}
def __repr__(self) -> str:
'''
"""
Return the details for the cache instance
[hits, misses, capacity, current_size]
'''
"""
return (f'CacheInfo(hits={self.hits}, misses={self.miss}, '
f'capacity={self.capacity}, current size={self.num_keys})')
return (
f"CacheInfo(hits={self.hits}, misses={self.miss}, "
f"capacity={self.capacity}, current_size={self.num_keys})"
)
def __contains__(self, key: int) -> bool:
'''
"""
>>> cache = LFUCache(1)
>>> 1 in cache
False
>>> cache.set(1, 1)
>>> 1 in cache
True
'''
"""
return key in self.cache
def get(self, key: int) -> Optional[int]:
'''
"""
Returns the value for the input key and updates the Double Linked List. Returns
None if key is not present in cache
'''
"""
if key in self.cache:
self.hits += 1
@@ -131,9 +133,9 @@ class LFUCache:
return None
def set(self, key: int, value: int) -> None:
'''
"""
Sets the value for the input key and updates the Double Linked List
'''
"""
if key not in self.cache:
if self.num_keys >= self.capacity:
@@ -152,12 +154,11 @@ class LFUCache:
@staticmethod
def decorator(size: int = 128):
'''
"""
Decorator version of LFU Cache
'''
"""
def cache_decorator_inner(func: Callable):
def cache_decorator_wrapper(*args, **kwargs):
if func not in LFUCache.decorator_function_to_instance_map:
LFUCache.decorator_function_to_instance_map[func] = LFUCache(size)

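The corrected doctest output above matches the new current_size field emitted by __repr__. A short sketch of how the class is used, both directly and through its decorator, mirroring that doctest:

# Mirrors the LFUCache doctest shown above.
cache = LFUCache(2)                # capacity of two entries
cache.set(1, 1)
cache.set(2, 2)
print(cache.get(1))                # 1 (a hit)
print(cache.get(3))                # None (a miss)
print(cache)                       # CacheInfo(hits=..., misses=..., capacity=2, current_size=2)

@LFUCache.decorator(100)
def fib(num):
    if num in (1, 2):
        return 1
    return fib(num - 1) + fib(num - 2)

for i in range(1, 100):
    res = fib(i)
print(fib.cache_info())            # hit/miss statistics gathered by the decorator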

@@ -2,9 +2,9 @@ from typing import Callable, Optional
class DoubleLinkedListNode:
'''
"""
Double Linked List Node built specifically for LRU Cache
'''
"""
def __init__(self, key: int, val: int):
self.key = key
@@ -14,9 +14,9 @@ class DoubleLinkedListNode:
class DoubleLinkedList:
'''
"""
Double Linked List built specifically for LRU Cache
'''
"""
def __init__(self):
self.head = DoubleLinkedListNode(None, None)
@@ -24,18 +24,18 @@ class DoubleLinkedList:
self.head.next, self.rear.prev = self.rear, self.head
def add(self, node: DoubleLinkedListNode) -> None:
'''
"""
Adds the given node to the end of the list (before rear)
'''
"""
temp = self.rear.prev
temp.next, node.prev = node, temp
self.rear.prev, node.next = node, self.rear
def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode:
'''
"""
Removes and returns the given node from the list
'''
"""
temp_last, temp_next = node.prev, node.next
node.prev, node.next = None, None
@ -45,7 +45,7 @@ class DoubleLinkedList:
class LRUCache:
'''
"""
LRU Cache to store a given capacity of data. Can be used as a stand-alone object
or as a function decorator.
@@ -86,7 +86,7 @@ class LRUCache:
>>> fib.cache_info()
CacheInfo(hits=194, misses=99, capacity=100, current size=99)
'''
"""
# class variable to map the decorator functions to their respective instance
decorator_function_to_instance_map = {}
@@ -100,16 +100,18 @@ class LRUCache:
self.cache = {}
def __repr__(self) -> str:
'''
"""
Return the details for the cache instance
[hits, misses, capacity, current_size]
'''
"""
return (f'CacheInfo(hits={self.hits}, misses={self.miss}, '
f'capacity={self.capacity}, current size={self.num_keys})')
return (
f"CacheInfo(hits={self.hits}, misses={self.miss}, "
f"capacity={self.capacity}, current size={self.num_keys})"
)
def __contains__(self, key: int) -> bool:
'''
"""
>>> cache = LRUCache(1)
>>> 1 in cache
@@ -119,15 +121,15 @@ class LRUCache:
>>> 1 in cache
True
'''
"""
return key in self.cache
def get(self, key: int) -> Optional[int]:
'''
"""
Returns the value for the input key and updates the Double Linked List. Returns
None if key is not present in cache
'''
"""
if key in self.cache:
self.hits += 1
@@ -137,9 +139,9 @@ class LRUCache:
return None
def set(self, key: int, value: int) -> None:
'''
"""
Sets the value for the input key and updates the Double Linked List
'''
"""
if key not in self.cache:
if self.num_keys >= self.capacity:
@@ -158,12 +160,11 @@ class LRUCache:
@staticmethod
def decorator(size: int = 128):
'''
"""
Decorator version of LRU Cache
'''
"""
def cache_decorator_inner(func: Callable):
def cache_decorator_wrapper(*args, **kwargs):
if func not in LRUCache.decorator_function_to_instance_map:
LRUCache.decorator_function_to_instance_map[func] = LRUCache(size)

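The LRU variant keeps the same interface; note that, unlike the LFU repr above, its CacheInfo still spells current size with a space. A matching decorator sketch taken from its doctest:

# Mirrors the LRUCache doctest shown above.
@LRUCache.decorator(100)
def fib(num):
    if num in (1, 2):
        return 1
    return fib(num - 1) + fib(num - 2)

for i in range(1, 100):
    res = fib(i)
print(fib.cache_info())  # CacheInfo(hits=194, misses=99, capacity=100, current size=99)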

@@ -1 +0,0 @@


@@ -28,9 +28,9 @@ def unique_prime_factors(n: int) -> set:
not the order in which it is produced.
>>> sorted(set(unique_prime_factors(14)))
[2, 7]
>>> set(sorted(unique_prime_factors(644)))
>>> sorted(set(unique_prime_factors(644)))
[2, 7, 23]
>>> set(sorted(unique_prime_factors(646)))
>>> sorted(set(unique_prime_factors(646)))
[2, 17, 19]
"""
i = 2
@@ -64,7 +64,7 @@ def equality(iterable: list) -> bool:
>>> equality([2, 2, 2, 2])
True
>>> equality([1, 2, 3, 2, 1])
True
False
"""
return len(set(iterable)) in (0, 1)
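Both doctest fixes above follow from the same point: a set has no guaranteed display order, so sorted(set(...)) is the deterministic form to assert against, and equality([1, 2, 3, 2, 1]) is indeed False because the list holds more than one distinct value. A quick check of both:

# Quick check of the corrected doctests above; equality is copied from the diff.
def equality(iterable: list) -> bool:
    return len(set(iterable)) in (0, 1)

print(set(sorted([2, 7, 23])))     # a set prints in hash order, not sorted order: brittle in a doctest
print(sorted(set([23, 7, 2, 7])))  # [2, 7, 23], deterministic and safe to assert
print(equality([2, 2, 2, 2]))      # True
print(equality([1, 2, 3, 2, 1]))   # False, matching the corrected expected output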