Setup

rm(list=ls(all=TRUE))
setwd('C:/Users/sitdo/Documents/GitHub/IBD-EDA/paper1/')
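The absolute path above is machine-specific. A minimal, more portable sketch (an assumption, not part of the original notebook) would resolve the path from the project root, for example with the here package:

library(here)
# Assumes the notebook lives in an RStudio project whose root is IBD-EDA,
# with this analysis under paper1/ (hypothetical layout).
data_path <- here("paper1", "data_preprocessed", "data.csv")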

Loading Data

library(dplyr)

Attaching package: ‘dplyr’

The following objects are masked from ‘package:stats’:

    filter, lag

The following objects are masked from ‘package:base’:

    intersect, setdiff, setequal, union
data <- read.csv("./data_preprocessed/data.csv") %>% select(-1)
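A quick sanity check after loading can confirm the dimensions and the outcome balance; a minimal sketch, assuming the first column is the binary outcome dod used below:

dim(data)
head(names(data))
table(data$dod)  # outcome balance (assumed to be coded 0/1)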

Loading Packages

library(randomForest)
randomForest 4.7-1.1
Type rfNews() to see new features/changes/bug fixes.

Attaching package: ‘randomForest’

The following object is masked from ‘package:dplyr’:

    combine
library(pROC)
Type 'citation("pROC")' for a citation.

Attaching package: ‘pROC’

The following objects are masked from ‘package:stats’:

    cov, smooth, var

Method I: Splitting Data

set.seed(123)
splitting_ratio <- 0.7

indices <- 1:nrow(data)
shuffled_indices <- sample(indices) 
train_size <- floor(splitting_ratio * length(indices))

train_indices <- shuffled_indices[1:train_size]
test_indices <- shuffled_indices[(train_size + 1):length(indices)]

train_data <- data[train_indices, ]
test_data <- data[test_indices, ]
train_X <- as.matrix(train_data[, -1])
train_y <- train_data[, 1]

test_X <- as.matrix(test_data[, -1])
test_y <- test_data[, 1]
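Before modelling, it can be worth verifying that the split preserved the outcome balance; a short optional check, assuming train_y and test_y hold the 0/1 outcome:

length(train_y); length(test_y)
prop.table(table(train_y))
prop.table(table(test_y))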

Building Model

rf_model <- randomForest(train_X, train_y)
Warning: The response has five or fewer unique values.  Are you sure you want to do regression?
predictions <- predict(rf_model, test_X)
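The warning appears because train_y is numeric 0/1, so randomForest() fits a regression forest and predict() returns scores rather than class probabilities. A minimal alternative sketch (not the notebook's original approach, and it would change the printed results slightly) converts the response to a factor and fits a classification forest:

rf_clf <- randomForest(train_X, as.factor(train_y))
# type = "prob" returns a matrix of class probabilities; column "1" plays the
# role of the regression scores used above.
prob_1 <- predict(rf_clf, test_X, type = "prob")[, "1"]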

Performance

Confusion Matrix

confusion_matrix <- table(
  as.numeric(test_data$dod), as.numeric(ifelse(predictions > 0.5, 1, 0))
)

# Rows are actual values, columns are thresholded predictions; with the labels
# sorted, [1, 1] counts actual 0 / predicted 0, so class 0 is treated as the
# positive class in the metrics below.
TP <- confusion_matrix[1, 1]
TN <- confusion_matrix[2, 2]
FP <- confusion_matrix[2, 1]
FN <- confusion_matrix[1, 2]

## Calculate Accuracy
accuracy <- (TP + TN) / (TP + FP + TN + FN)
cat("Accuracy:", accuracy, "\n")
Accuracy: 0.8677686 
## Calculate Recall
recall <- TP / (TP + FN)
cat("Recall:", recall, "\n")
Recall: 0.9495114 
## Calculate Precision
precision <- TP / (TP + FP)
cat("Precision:", precision, "\n")
Precision: 0.8996914 
## Calculate Specificity
specificity <- TN / (TN + FP)
cat("Specificity:", specificity, "\n")
Specificity: 0.4196429 
## Calculate F1 Score
f1_score <- 2 * (precision * recall) / (precision + recall)
cat("F1 Score:", f1_score, "\n")
F1 Score: 0.9239303 
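The same five metrics are recomputed in Method II, so they could be wrapped in a small helper. A hypothetical sketch, keeping the notebook's 0.5 cutoff and its convention that class 0 occupies the first row and column of the table:

classification_metrics <- function(actual, pred_prob, cutoff = 0.5) {
  cm <- table(as.numeric(actual), as.numeric(ifelse(pred_prob > cutoff, 1, 0)))
  TP <- cm[1, 1]; TN <- cm[2, 2]; FP <- cm[2, 1]; FN <- cm[1, 2]
  c(
    accuracy    = (TP + TN) / (TP + FP + TN + FN),
    recall      = TP / (TP + FN),
    precision   = TP / (TP + FP),
    specificity = TN / (TN + FP),
    f1          = 2 * TP / (2 * TP + FP + FN)
  )
}
classification_metrics(test_data$dod, predictions)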

ROC Curve

# Calculate ROC curve using the actual values and predictions
roc_obj <- roc(
  as.numeric(test_data$dod), predictions
)
Setting levels: control = 0, case = 1
Setting direction: controls < cases
# Plot the ROC curve
plot(
  roc_obj,
  col = "blue",
  main = "ROC Curve - Random Forest",
  legacy.axes = TRUE,
  print.auc = TRUE,
  print.thres = TRUE,
  grid = c(0.2, 0.2),
  grid.col = c("green", "orange")
)
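The AUC printed on the plot can also be extracted numerically, and pROC can attach a confidence interval; for example:

auc(roc_obj)
ci.auc(roc_obj)  # 95% CI (DeLong method by default)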

Method II: Cross Validation

# Perform 10-fold cross-validation
num_folds <- 10
folds <- cut(seq(1, nrow(data)), breaks = num_folds, labels = FALSE)

# Create empty vectors to store the predictions and actual values
all_predictions <- vector()
all_actuals <- vector()

for (i in 1:num_folds) {
  # Split the data into training and test sets for the current fold
  train_data <- data[folds != i, ]
  test_data <- data[folds == i, ]
  
  # Convert training data to matrix format
  train_X <- as.matrix(train_data[, -1])
  train_y <- train_data[, 1]
  
  # Train the random forest model
  rf_model <- randomForest(train_X, train_y)
  
  # Convert test data to matrix format
  test_X <- as.matrix(test_data[, -1])
  test_y <- test_data[, 1]
  
  # Make predictions on the test set
  predictions <- predict(rf_model, test_X)
  
  # Append the predictions and actual values to the vectors
  all_predictions <- c(all_predictions, predictions)
  all_actuals <- c(all_actuals, test_y)
}
Warning: The response has five or fewer unique values.  Are you sure you want to do regression? (repeated once per fold, 10 times in total)
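Note that cut(seq(1, nrow(data)), ...) assigns consecutive rows to the same fold, which only behaves like random folds if the rows are already in random order. A minimal alternative sketch with explicitly shuffled fold assignment (an assumption, not the notebook's original scheme):

set.seed(123)
folds <- sample(rep(1:num_folds, length.out = nrow(data)))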

Performance

Confusion Matrix

confusion_matrix <- table(
  as.numeric(all_actuals), 
  as.numeric(ifelse(all_predictions > 0.5, 1, 0))
)

# Same convention as in Method I: rows are actual values, columns are
# thresholded predictions, and class 0 is treated as the positive class.
TP <- confusion_matrix[1, 1]
TN <- confusion_matrix[2, 2]
FP <- confusion_matrix[2, 1]
FN <- confusion_matrix[1, 2]

## Calculate Accuracy
accuracy <- (TP + TN) / (TP + FP + TN + FN)
cat("Accuracy:", accuracy, "\n")
Accuracy: 0.8709144 
## Calculate Recall
recall <- TP / (TP + FN)
cat("Recall:", recall, "\n")
Recall: 0.9543446 
## Calculate Precision
precision <- TP / (TP + FP)
cat("Precision:", precision, "\n")
Precision: 0.8987517 
## Calculate Specificity
specificity <- TN / (TN + FP)
cat("Specificity:", specificity, "\n")
Specificity: 0.4236842 
## Calculate F1 Score
f1_score <- 2 * (precision * recall) / (precision + recall)
cat("F1 Score:", f1_score, "\n")
F1 Score: 0.9257143 
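If the classification_metrics() helper sketched in Method I were defined, the block above would collapse to a single call (a usage note, not part of the original notebook):

classification_metrics(all_actuals, all_predictions)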

ROC Curve

# Calculate ROC curve using the actual values and predictions
roc_obj <- roc(
  as.numeric(all_actuals), all_predictions
)
Setting levels: control = 0, case = 1
Setting direction: controls < cases
# Plot the ROC curve
plot(
  roc_obj,
  col = "blue",
  main = "ROC Curve - Random Forest (Cross Validation)",
  legacy.axes = TRUE,
  print.auc = TRUE,
  print.thres = TRUE,
  grid = c(0.2, 0.2),
  grid.col = c("green", "orange")
)
