Consolidate the two existing kNN implementations #8903

Merged: 9 commits, Sep 27, 2023
1 change: 0 additions & 1 deletion DIRECTORY.md
@@ -502,7 +502,6 @@
* [Gradient Descent](machine_learning/gradient_descent.py)
* [K Means Clust](machine_learning/k_means_clust.py)
* [K Nearest Neighbours](machine_learning/k_nearest_neighbours.py)
* [Knn Sklearn](machine_learning/knn_sklearn.py)
* [Linear Discriminant Analysis](machine_learning/linear_discriminant_analysis.py)
* [Linear Regression](machine_learning/linear_regression.py)
* Local Weighted Learning
128 changes: 79 additions & 49 deletions machine_learning/k_nearest_neighbours.py
@@ -1,58 +1,88 @@
"""
k-Nearest Neighbours (kNN) is a simple non-parametric supervised learning
algorithm used for classification. Given some labelled training data, a given
point is classified using its k nearest neighbours according to some distance
metric. The most commonly occurring label among those neighbours then becomes the
label of the point; in effect, its class is decided by a majority vote.

This implementation uses the commonly used Euclidean distance metric, but other
distance metrics can also be used.

Reference: https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm
"""

from collections import Counter
from heapq import nsmallest

import numpy as np
from sklearn import datasets
from sklearn.model_selection import train_test_split

data = datasets.load_iris()

X = np.array(data["data"])
y = np.array(data["target"])
classes = data["target_names"]

X_train, X_test, y_train, y_test = train_test_split(X, y)


def euclidean_distance(a, b):
"""
Gives the euclidean distance between two points
>>> euclidean_distance([0, 0], [3, 4])
5.0
>>> euclidean_distance([1, 2, 3], [1, 8, 11])
10.0
"""
return np.linalg.norm(np.array(a) - np.array(b))


def classifier(train_data, train_target, classes, point, k=5):
"""
Classifies the point using the KNN algorithm
k closest points are found (ranked in ascending order of euclidean distance)
Params:
:train_data: Set of points that are classified into two or more classes
:train_target: List of classes in the order of train_data points
:classes: Labels of the classes
:point: The data point that needs to be classified

>>> X_train = [[0, 0], [1, 0], [0, 1], [0.5, 0.5], [3, 3], [2, 3], [3, 2]]
>>> y_train = [0, 0, 0, 0, 1, 1, 1]
>>> classes = ['A','B']; point = [1.2,1.2]
>>> classifier(X_train, y_train, classes,point)
'A'
"""
data = zip(train_data, train_target)
# List of distances of all points from the point to be classified
distances = []
for data_point in data:
distance = euclidean_distance(data_point[0], point)
distances.append((distance, data_point[1]))
# Choosing 'k' points with the least distances.
votes = [i[1] for i in sorted(distances)[:k]]
# Most commonly occurring class among them
# is the class into which the point is classified
result = Counter(votes).most_common(1)[0][0]
return classes[result]

class KNN:
    def __init__(
        self,
        train_data: np.ndarray[float],
        train_target: np.ndarray[int],
        class_labels: list[str],
    ) -> None:
        """
        Create a kNN classifier using the given training data and class labels
        """
        # zip() yields a lazy, single-use iterator over (point, target) pairs
        self.data = zip(train_data, train_target)
        self.labels = class_labels

    @staticmethod
    def _euclidean_distance(a: np.ndarray[float], b: np.ndarray[float]) -> float:
        """
        Calculate the Euclidean distance between two points
        >>> KNN._euclidean_distance(np.array([0, 0]), np.array([3, 4]))
        5.0
        >>> KNN._euclidean_distance(np.array([1, 2, 3]), np.array([1, 8, 11]))
        10.0
        """
        return np.linalg.norm(a - b)

    def classify(self, pred_point: np.ndarray[float], k: int = 5) -> str:
        """
        Classify a given point using the kNN algorithm
        >>> train_X = np.array(
        ...     [[0, 0], [1, 0], [0, 1], [0.5, 0.5], [3, 3], [2, 3], [3, 2]]
        ... )
        >>> train_y = np.array([0, 0, 0, 0, 1, 1, 1])
        >>> classes = ['A', 'B']
        >>> knn = KNN(train_X, train_y, classes)
        >>> point = np.array([1.2, 1.2])
        >>> knn.classify(point)
        'A'
        """
        # Distances of all points from the point to be classified
        distances = (
            (self._euclidean_distance(data_point[0], pred_point), data_point[1])
            for data_point in self.data
        )

        # Choosing k points with the shortest distances
        votes = (i[1] for i in nsmallest(k, distances))

        # Most commonly occurring class is the one into which the point is classified
        result = Counter(votes).most_common(1)[0][0]
        return self.labels[result]


if __name__ == "__main__":
    print(classifier(X_train, y_train, classes, [4.4, 3.1, 1.3, 1.4]))
    import doctest

    doctest.testmod()

    iris = datasets.load_iris()

    X = np.array(iris["data"])
    y = np.array(iris["target"])
    iris_classes = iris["target_names"]

    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    iris_point = np.array([4.4, 3.1, 1.3, 1.4])
    classifier = KNN(X_train, y_train, iris_classes)
    print(classifier.classify(iris_point, k=3))
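
For quick reference outside the doctests, a minimal usage sketch of the consolidated class on made-up two-dimensional data (the data, labels, and import path below are illustrative assumptions, not part of this PR; the import assumes the repository root is on the Python path):

# Illustrative usage of the consolidated KNN class (hypothetical data).
import numpy as np

from machine_learning.k_nearest_neighbours import KNN  # assumes repo root on sys.path

train_X = np.array([[0.0, 0.0], [1.0, 0.0], [3.0, 3.0], [2.0, 3.0]])
train_y = np.array([0, 0, 1, 1])
knn = KNN(train_X, train_y, ["red", "blue"])

# Two of the three nearest neighbours of (2.5, 2.5) carry label 1, so "blue" wins the vote
print(knn.classify(np.array([2.5, 2.5]), k=3))  # blue
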
31 changes: 0 additions & 31 deletions machine_learning/knn_sklearn.py

This file was deleted.
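
The removed knn_sklearn.py is not shown in this diff, but judging by its name it wrapped scikit-learn's built-in classifier. For comparison with the consolidated implementation, here is a minimal sketch of the equivalent scikit-learn usage on the same Iris data; it is an assumption about what the removed file did, not its actual contents:

# Rough scikit-learn equivalent of the removed knn_sklearn.py (assumed, not verbatim).
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

iris = datasets.load_iris()
X_train, X_test, y_train, y_test = train_test_split(
    iris["data"], iris["target"], random_state=0
)

# n_neighbors=3 mirrors the k=3 used in the consolidated demo above
model = KNeighborsClassifier(n_neighbors=3)
model.fit(X_train, y_train)

prediction = model.predict([[4.4, 3.1, 1.3, 1.4]])
print(iris["target_names"][prediction[0]])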