R Programming

May 12, 2026
Updated 1 day ago
1 min read

R Programming

Linear Regression

r
# Simple linear regression: model height as a linear function of weight.

# Paired observations (height in cm, weight in kg) for ten individuals.
height <- c(140, 142, 150, 147, 139, 162, 164, 136, 148, 147)
weight <- c(59, 61, 66, 62, 57, 68, 69, 58, 63, 62)

model_data <- data.frame(
  height,
  weight
)

print(model_data)

# Fit height ~ intercept + slope * weight by ordinary least squares.
linear_model <- lm(height ~ weight, data = model_data)

# Model diagnostics and the fitted intercept/slope.
summary(linear_model)
coefficients(linear_model)

Logistic Regression

r
# Binary logistic regression on the iris data set.

data(iris)
str(iris)

# Keep only two species so the response is binary ("abc" matched no
# species, leaving all three levels), then drop the now-unused level.
iris_subset <- subset(iris, iris$Species != "setosa")
iris_subset$Species <- factor(iris_subset$Species)

# family = binomial: the first factor level is treated as "failure",
# the second as "success".
reg_model <- glm(
  Species ~ Sepal.Length + Sepal.Width,
  data = iris_subset,
  family = binomial
)

summary(reg_model)

# predict() must be called on the fitted model, not on the data frame;
# type = "response" returns P(second level) for each row.
predicted_prob <- predict(reg_model, newdata = iris_subset, type = "response")

# Map probabilities back onto the real species names so the confusion
# table below compares like with like.
predicted_class <- ifelse(
  predicted_prob > 0.5,
  levels(iris_subset$Species)[2],
  levels(iris_subset$Species)[1]
)
table(
  predicted = predicted_class,
  actual = iris_subset$Species
)

KNN

r
# k-nearest-neighbour classification of iris species.
library(class)

data(iris)

str(iris)

# Features (four numeric measurements) and the class label.
features <- iris[, 1:4]
class_labels <- iris$Species

# Min-max normalisation to [0, 1] so no single feature dominates the
# Euclidean distance used by knn().
# NOTE(review): divides by zero for a constant column — iris has none.
normalize <- function(x) {
  (x - min(x)) / (max(x) - min(x))
}

# Normalise each feature column independently.
features_normalized <- as.data.frame(
  lapply(features, normalize)
)

# Reproducible 70/30 train-test split.
set.seed(123)

n <- nrow(features_normalized)

# floor() guards against a non-integer size when n is not a multiple
# of 10; sample(n, ...) draws from 1:n.
train_index <- sample(n, floor(0.7 * n))

train_data <- features_normalized[train_index, ]
test_data  <- features_normalized[-train_index, ]

train_labels <- class_labels[train_index]
test_labels  <- class_labels[-train_index]

# Classify each test row by majority vote of its 5 nearest training
# neighbours.
knn_prediction <- knn(
  train = train_data,
  test  = test_data,
  cl    = train_labels,
  k     = 5
)

# Predicted species for each test row.
print(knn_prediction)

# Confusion matrix: predicted vs actual species.
confusion_matrix <- table(
  Predicted = knn_prediction,
  Actual    = test_labels
)

print(confusion_matrix)

# Accuracy = proportion of test rows classified correctly.
accuracy <- sum(diag(confusion_matrix)) / sum(confusion_matrix)

print(paste("Accuracy:", round(accuracy * 100, 2), "%"))