Predict method for DNN objects.

Usage

# S3 method for class 'DNN'
predict(object, newdata, newoutcome = NULL, verbose = FALSE, ...)

Arguments

object

A model fitting object from the SEMdnn() function.

newdata

A matrix containing new data with rows corresponding to subjects, and columns to variables.

newoutcome

A new character vector (as.factor) of labels for a categorical output (target); default = NULL.

verbose

If TRUE, print the predicted out-of-sample MSE values (default = FALSE).

...

Currently ignored.
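
As a minimal sketch (the object names dnn0, data.test and outcome.test are placeholders, assuming dnn0 is a model fitted with SEMdnn() and data.test is a matrix of held-out subjects), a typical call might be:

pred <- predict(dnn0, newdata = data.test, verbose = TRUE)
# with a categorical (as.factor) outcome, supply the new labels as well:
pred <- predict(dnn0, newdata = data.test, newoutcome = outcome.test)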

Value

A list of three objects:

  1. "PE", vector of the amse = average MSE over all (sink and mediators) graph nodes; r2 = 1 - amse; and srmr= Standardized Root Means Square Residual between the out-of-bag correlation matrix and the model correlation matrix.

  2. "mse", vector of the Mean Squared Error (MSE) for each out-of-bag prediction of the sink and mediators graph nodes.

  3. "Yhat", the matrix of continuous predicted values of graph nodes (excluding source nodes) based on out-of-bag samples.

Author

Mario Grassi mario.grassi@unipv.it

Examples


# \donttest{
if (torch::torch_is_installed()){

# Load Amyotrophic Lateral Sclerosis (ALS)
ig<- alsData$graph
data<- alsData$exprs
data<- transformData(data)$data
group<- alsData$group 

#...with train-test (0.5-0.5) samples
set.seed(123)
train<- sample(1:nrow(data), 0.5*nrow(data))
#ncores<- parallel::detectCores(logical = FALSE)

start<- Sys.time()
dnn0 <- SEMdnn(ig, data[train, ],
      # hidden = 5*K, link = "selu", bias = TRUE, 
      hidden = c(10,10,10), link = "selu", bias = TRUE, 
      validation = 0, epochs = 32, ncores = 2)
end<- Sys.time()
print(end-start)
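# Out-of-sample prediction of the (sink and mediator) graph nodes on the test set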
pred.dnn <- predict(dnn0, data[-train, ], verbose=TRUE)

# SEMrun vs. SEMdnn MSE comparison
sem0 <- SEMrun(ig, data[train, ], algo="ricf", n_rep=0)
pred.sem <- predict(sem0, data[-train,], verbose=TRUE)

#...with a categorical (as.factor) outcome
outcome <- factor(ifelse(group == 0, "control", "case")); table(outcome) 

start<- Sys.time()
dnn1 <- SEMdnn(ig, data[train, ], outcome[train],
      #hidden = 5*K, link = "selu", bias = TRUE,
      hidden = c(10,10,10), link = "selu", bias = TRUE,
      validation = 0,  epochs = 32, ncores = 2)
end<- Sys.time()
print(end-start)

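# Predicted values per outcome level (columns of Yhat) and classification report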
pred <- predict(dnn1, data[-train, ], outcome[-train], verbose=TRUE)
yhat <- pred$Yhat[ ,levels(outcome)]; head(yhat)
yobs <- outcome[-train]; head(yobs)
classificationReport(yobs, yhat, verbose=TRUE)$stats
}
#> Conducting the nonparanormal transformation via shrunkun ECDF...done.
#> Running SEM model via DNN...
#>  done.
#> 
#> DNN solver ended normally after 736 iterations
#> 
#>  logL:-41.321965  srmr:0.203492
#> Time difference of 4.699288 secs
#>      amse        r2      srmr 
#> 0.6428678 0.3571322 0.2519231 
#> RICF solver ended normally after 2 iterations 
#> 
#> deviance/df: 6.262846  srmr: 0.3040025 
#> 
#>      amse        r2      srmr 
#> 0.7653813 0.2346187 0.2948502 
#> Running SEM model via DNN...
#>  done.
#> 
#> DNN solver ended normally after 800 iterations
#> 
#>  logL:-38.883117  srmr:0.167098
#> Time difference of 4.834204 secs
#>      amse        r2      srmr 
#> 0.5953616 0.4046384 0.2199239 
#>          pred
#> yobs      case control
#>   case      65       9
#>   control    1       5
#> 

#>              precision    recall        f1 accuracy       mcc support
#> case         0.9848485 0.8783784 0.9285714    0.875 0.4933551      74
#> control      0.3571429 0.8333333 0.5000000    0.875 0.4933551       6
#> macro avg    0.6709957 0.8558559 0.7142857    0.875 0.4933551      80
#> weighted avg 0.9377706 0.8750000 0.8964286    0.875 0.4933551      80
#>              support_prop
#> case                0.925
#> control             0.075
#> macro avg           1.000
#> weighted avg        1.000
# }