#include <stdio.h>
#include <math.h>
/* Number of nearest neighbors that vote on each prediction. */
#define K 3
/* Number of labeled examples in the training set. */
#define TRAINING_DATA_SIZE 4
/* Number of query points to classify. */
#define TEST_DATA_SIZE 1
/* Dimensionality of every feature vector. */
#define NUM_FEATURES 2
/* Class label (0 or 1) of each training example; parallel to X_train rows. */
int y_train[TRAINING_DATA_SIZE] = {0, 0, 1, 1};
/* Training feature vectors; row j carries label y_train[j]. */
double X_train[TRAINING_DATA_SIZE][NUM_FEATURES] = {
{1, 2},
{2, 4},
{4, 1},
{5, 3}
};
/* Query feature vectors to be classified in main(). */
double X_test[TEST_DATA_SIZE][NUM_FEATURES] = {
{3, 5}
};
/*
 * Euclidean (L2) distance between two NUM_FEATURES-dimensional points.
 *
 * x1, x2: feature vectors of length NUM_FEATURES.
 * Returns sqrt(sum_i (x1[i] - x2[i])^2), always >= 0.
 */
double euclidean_distance(double x1[NUM_FEATURES], double x2[NUM_FEATURES]) {
    double sum = 0;
    for (int i = 0; i < NUM_FEATURES; i++) {
        double diff = x1[i] - x2[i];
        /* diff * diff instead of pow(diff, 2): avoids a general-purpose
         * power-function call for a simple square. */
        sum += diff * diff;
    }
    return sqrt(sum);
}
/*
 * Majority vote over the first K entries of votes, where each entry is a
 * class label (0 or nonzero, treated as class 1).
 * Returns 0 only when class 0 holds a strict majority; otherwise returns 1
 * (so an even split resolves to class 1, as in the original).
 */
int majority_vote(int *votes) {
    int ones = 0;
    for (int i = 0; i < K; i++) {
        ones += (votes[i] != 0);
    }
    /* Class 1 wins unless the zeros outnumber the ones. */
    return (2 * ones >= K) ? 1 : 0;
}
/*
 * K-nearest-neighbors classification of every test point:
 *   1. compute the distance from the test point to each training point,
 *   2. select the K closest training points (selection by repeated minimum,
 *      masking each chosen entry with INFINITY),
 *   3. predict by majority vote over the neighbors' labels.
 * Prints one "Predicted class: <c>" line per test point; returns 0.
 */
int main(void) {
    int votes[K];
    double distances[TRAINING_DATA_SIZE];
    int nearest_neighbors[K];

    for (int i = 0; i < TEST_DATA_SIZE; i++) {
        /* Distance from the current test point to every training point. */
        for (int j = 0; j < TRAINING_DATA_SIZE; j++) {
            distances[j] = euclidean_distance(X_test[i], X_train[j]);
        }
        /* Pick the K nearest training points, one per round. */
        for (int j = 0; j < K; j++) {
            double min_distance = INFINITY;
            /* Fix: min_index was uninitialized; reading it when no element
             * beats INFINITY would be undefined behavior. */
            int min_index = 0;
            for (int k = 0; k < TRAINING_DATA_SIZE; k++) {
                if (distances[k] < min_distance) {
                    min_distance = distances[k];
                    min_index = k;
                }
            }
            nearest_neighbors[j] = min_index;
            /* Mask the chosen entry so later rounds skip it. */
            distances[min_index] = INFINITY;
        }
        /* Collect the neighbors' labels and vote. */
        for (int j = 0; j < K; j++) {
            votes[j] = y_train[nearest_neighbors[j]];
        }
        int prediction = majority_vote(votes);
        printf("Predicted class: %d\n", prediction);
    }
    return 0;
}
Equivalent Python implementation using scikit-learn:
import numpy as np
from sklearn.neighbors import KNeighborsClassifier

# Sample training data: four 2-D points with binary labels.
X_train = np.array([[1, 2], [2, 4], [4, 1], [5, 3]])
y_train = np.array([0, 0, 1, 1])

# Sample test data: the single query point to classify.
X_test = np.array([[3, 5]])

# Create the KNN classifier with K=3.
knn = KNeighborsClassifier(n_neighbors=3)

# Fit the classifier to the training data.
knn.fit(X_train, y_train)

# Predict the class of the test data.
# Fix: the original fused several statements onto one line
# (e.g. "knn.predict(X_test)print(...)"), which is a syntax error.
prediction = knn.predict(X_test)
print("Predicted class:", prediction)