Skip to content

Commit 3ca9568

Browse files
JeelGajeraJeel Gajerapre-commit-ci[bot]cclauss
authored andcommitted
feat: adding Apriori Algorithm (TheAlgorithms#10491)
* feat: adding Apriori Algorithm * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix: doctest, typo * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix: type error, code refactore * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix: refactore code * fix: doctest * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix: E501, B007 * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix: err * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix: arg typ err * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix: typo * fix: typo * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Replace generate_candidates() with itertools.combinations() * mypy * Update apriori_algorithm.py --------- Co-authored-by: Jeel Gajera <jeelgajera00@gmail.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Christian Clauss <cclauss@me.com>
1 parent 4debb4b commit 3ca9568

File tree

2 files changed

+113
-0
lines changed

2 files changed

+113
-0
lines changed

DIRECTORY.md

+1
Original file line numberDiff line numberDiff line change
@@ -554,6 +554,7 @@
554554
* [Word Frequency Functions](machine_learning/word_frequency_functions.py)
555555
* [Xgboost Classifier](machine_learning/xgboost_classifier.py)
556556
* [Xgboost Regressor](machine_learning/xgboost_regressor.py)
557+
* [Apriori Algorithm](machine_learning/apriori_algorithm.py)
557558

558559
## Maths
559560
* [Abs](maths/abs.py)

machine_learning/apriori_algorithm.py

+112
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
"""
2+
The Apriori algorithm is an association rule mining technique, also known as market basket
3+
analysis, aims to discover interesting relationships or associations among a set of
4+
items in a transactional or relational database.
5+
6+
For example, Apriori Algorithm states: "If a customer buys item A and item B, then they
7+
are likely to buy item C." This rule suggests a relationship between items A, B, and C,
8+
indicating that customers who purchased A and B are more likely to also purchase item C.
9+
10+
WIKI: https://en.wikipedia.org/wiki/Apriori_algorithm
11+
Examples: https://www.kaggle.com/code/earthian/apriori-association-rules-mining
12+
"""
13+
from itertools import combinations
14+
15+
16+
def load_data() -> list[list[str]]:
    """
    Return a small sample transaction dataset.

    Each inner list is one transaction (the items bought together).

    >>> load_data()
    [['milk'], ['milk', 'butter'], ['milk', 'bread'], ['milk', 'bread', 'chips']]
    """
    transactions = [
        ["milk"],
        ["milk", "butter"],
        ["milk", "bread"],
        ["milk", "bread", "chips"],
    ]
    return transactions
24+
25+
26+
def prune(itemset: list, candidates: list, length: int) -> list:
    """
    Prune candidate itemsets that are not frequent.

    A candidate survives only if every one of its items occurs in ``itemset``
    at least ``length - 1`` times — an Apriori-style check that all (k-1)
    subsets of a candidate were frequent in the previous iteration.

    >>> itemset = ['X', 'Y', 'Z']
    >>> candidates = [['X', 'Y'], ['X', 'Z'], ['Y', 'Z']]
    >>> prune(itemset, candidates, 2)
    [['X', 'Y'], ['X', 'Z'], ['Y', 'Z']]

    >>> itemset = ['1', '2', '3', '4']
    >>> candidates = ['1', '2', '4']
    >>> prune(itemset, candidates, 3)
    []
    """
    pruned = []
    for candidate in candidates:
        is_subsequence = True
        for item in candidate:
            if item not in itemset or itemset.count(item) < length - 1:
                is_subsequence = False
                break
        if is_subsequence:
            pruned.append(candidate)
    return pruned


def apriori(data: list[list[str]], min_support: int) -> list[tuple[list[str], int]]:
    """
    Return frequent itemsets and their support counts.

    The support of an itemset is the number of transactions in ``data`` that
    contain every item of the itemset.

    :param data: transactions, each a list of item names
    :param min_support: minimum support count for an itemset to be reported

    >>> data = [['A', 'B', 'C'], ['A', 'B'], ['A', 'C'], ['A', 'D'], ['B', 'C']]
    >>> apriori(data, 2)
    [(['A', 'B'], 2), (['A', 'C'], 2), (['B', 'C'], 2)]

    >>> data = [['1', '2', '3'], ['1', '2'], ['1', '3'], ['1', '4'], ['2', '3']]
    >>> apriori(data, 3)
    []
    """
    itemset = [list(transaction) for transaction in data]
    frequent_itemsets: list[tuple[list[str], int]] = []
    length = 1

    while itemset:
        # Support of each candidate: number of transactions containing it.
        counts = [
            sum(all(item in transaction for item in candidate) for transaction in data)
            for candidate in itemset
        ]

        # Keep each surviving candidate *paired with its own count*.  (The
        # previous version filtered the itemsets first and then re-indexed the
        # pre-filter ``counts`` list with the post-filter positions, which
        # attributed the wrong support to every surviving itemset.)
        survivors = [
            (candidate, count)
            for candidate, count in zip(itemset, counts)
            if count >= min_support
        ]
        frequent_itemsets.extend(
            (sorted(candidate), count) for candidate, count in survivors
        )

        # Build next-size candidates and prune the infrequent ones.
        itemset = [candidate for candidate, _ in survivors]
        length += 1
        itemset = prune(itemset, list(combinations(itemset, length)), length)

    return frequent_itemsets
93+
94+
95+
if __name__ == "__main__":
96+
"""
97+
Apriori algorithm for finding frequent itemsets.
98+
99+
Args:
100+
data: A list of transactions, where each transaction is a list of items.
101+
min_support: The minimum support threshold for frequent itemsets.
102+
103+
Returns:
104+
A list of frequent itemsets along with their support counts.
105+
"""
106+
import doctest
107+
108+
doctest.testmod()
109+
110+
# user-defined threshold or minimum support level
111+
frequent_itemsets = apriori(data=load_data(), min_support=2)
112+
print("\n".join(f"{itemset}: {support}" for itemset, support in frequent_itemsets))

0 commit comments

Comments
 (0)