Mass-univariate GLM Analysis
Arguments
- x
tabular data: Predictor variables. Usually a small number of covariates.
- y
data.frame or similar: Each column is a different outcome. The function will train one GLM for each column of y. Usually a large number of features.
- scale_y
Logical: If TRUE, scale each column of y to have mean 0 and sd 1. If NULL, defaults to TRUE if y is numeric, FALSE otherwise.
- center_y
Logical: If TRUE, center each column of y to have mean 0. If NULL, defaults to TRUE if scale_y is TRUE, FALSE otherwise.
- verbosity
Integer: Verbosity level.
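By default, numeric outcomes are scaled and centered before fitting; this can be disabled explicitly. A minimal sketch, using the x and y from the Examples below and assuming only the arguments documented above (treating verbosity = 0 as silent, which is an assumption, not confirmed by this page):

# Fit one GLM per column of y on the raw outcomes, without log output.
# (verbosity = 0 meaning "silent" is an assumption.)
mod_raw <- massGLM(x, y, scale_y = FALSE, center_y = FALSE, verbosity = 0)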
Examples
set.seed(2022)
y <- rnormmat(500, 40, return_df = TRUE)
x <- data.frame(
  x1 = y[[3]] - y[[5]] + y[[14]] + rnorm(500),
  x2 = y[[21]] + rnorm(500)
)
massmod <- massGLM(x, y)
#> 2026-02-22 18:59:29 ▶ [massGLM]
#> 2026-02-22 18:59:29 Scaling and centering 40 numeric features... [preprocess]
#> 2026-02-22 18:59:29 Preprocessing done. [preprocess]
#> 2026-02-22 18:59:29 Fitting 40 GLMs of family gaussian with 2 predictors each... [massGLM]
#> 2026-02-22 18:59:29 ✓ Done in 0.20 seconds. [massGLM]
# Print table of coefficients, p-values, etc. for all models
summary(massmod)
#> Variable Coefficient_x1 SE_x1 t_value_x1 p_value_x1 Coefficient_x2
#> <char> <num> <num> <num> <num> <num>
#> 1: V1 -0.012227696 0.02340701 -0.52239466 6.016283e-01 -0.0074959134
#> 2: V2 -0.016172444 0.02339501 -0.69127736 4.897140e-01 0.0198761005
#> 3: V3 0.263738165 0.02019066 13.06238443 9.893697e-34 0.0215936740
#> 4: V4 -0.007691143 0.02341074 -0.32853056 7.426489e-01 0.0081904684
#> 5: V5 -0.234867451 0.02091023 -11.23218183 3.042673e-26 0.0072831797
#> 6: V6 0.018957397 0.02339518 0.81031207 4.181487e-01 0.0134406273
#> 7: V7 0.026099855 0.02336429 1.11708331 2.644984e-01 -0.0314172255
#> 8: V8 -0.022938869 0.02335230 -0.98229587 3.264320e-01 -0.0423806876
#> 9: V9 -0.007386125 0.02341222 -0.31548164 7.525284e-01 -0.0025163749
#> 10: V10 -0.055494131 0.02322289 -2.38963040 1.723706e-02 -0.0511887882
#> 11: V11 -0.028554451 0.02336725 -1.22198577 2.222923e-01 -0.0234632736
#> 12: V12 0.005642339 0.02336555 0.24148106 8.092819e-01 -0.0468416543
#> 13: V13 -0.003267662 0.02341394 -0.13956054 8.890638e-01 0.0037928730
#> 14: V14 0.282719258 0.01967239 14.37137338 2.048671e-39 0.0168034914
#> 15: V15 -0.003714319 0.02338291 -0.15884759 8.738535e-01 0.0378488461
#> 16: V16 -0.046501626 0.02310769 -2.01238730 4.471809e-02 0.0991615864
#> 17: V17 -0.041450294 0.02334077 -1.77587527 7.636526e-02 0.0001407537
#> 18: V18 -0.025193136 0.02338618 -1.07726610 2.818840e-01 -0.0072418856
#> 19: V19 -0.029300872 0.02336930 -1.25381905 2.104972e-01 0.0200726066
#> 20: V20 0.001861342 0.02338267 0.07960348 9.365847e-01 -0.0382402831
#> 21: V21 -0.003134092 0.01575767 -0.19889307 8.424277e-01 0.5418038887
#> 22: V22 0.005374441 0.02341028 0.22957611 8.185156e-01 -0.0121552719
#> 23: V23 -0.020033928 0.02338932 -0.85654158 3.921111e-01 -0.0190524987
#> 24: V24 -0.007015160 0.02340311 -0.29975332 7.644906e-01 -0.0207609242
#> 25: V25 -0.032768433 0.02334984 -1.40336834 1.611315e-01 -0.0288231792
#> 26: V26 -0.001338262 0.02341384 -0.05715688 9.544432e-01 -0.0060216279
#> 27: V27 0.015239586 0.02336616 0.65220768 5.145687e-01 0.0418301657
#> 28: V28 -0.021506765 0.02337479 -0.92008380 3.579753e-01 0.0305487667
#> 29: V29 0.006203109 0.02340938 0.26498388 7.911317e-01 -0.0130516042
#> 30: V30 0.052244665 0.02321664 2.25031111 2.486571e-02 0.0598893250
#> 31: V31 0.001588453 0.02340607 0.06786500 9.459204e-01 0.0197542346
#> 32: V32 0.045153824 0.02332694 1.93569430 5.347195e-02 -0.0009619657
#> 33: V33 -0.004527866 0.02340663 -0.19344373 8.466905e-01 -0.0181041983
#> 34: V34 0.004215134 0.02339374 0.18018214 8.570830e-01 -0.0304749116
#> 35: V35 -0.035121615 0.02334260 -1.50461430 1.330585e-01 -0.0290719417
#> 36: V36 -0.016799952 0.02328890 -0.72137179 4.710200e-01 0.0722800222
#> 37: V37 0.033387969 0.02333179 1.43100766 1.530565e-01 -0.0403900644
#> 38: V38 -0.006480618 0.02340358 -0.27690715 7.819665e-01 0.0207497547
#> 39: V39 -0.023194665 0.02338610 -0.99181418 3.217708e-01 0.0161289508
#> 40: V40 -0.001320403 0.02337967 -0.05647653 9.549849e-01 -0.0399987517
#> Variable Coefficient_x1 SE_x1 t_value_x1 p_value_x1 Coefficient_x2
#> <char> <num> <num> <num> <num> <num>
#> SE_x2 t_value_x2 p_value_x2
#> <num> <num> <num>
#> 1: 0.03284654 -0.228210147 8.195767e-01
#> 2: 0.03282971 0.605430307 5.451695e-01
#> 3: 0.02833311 0.762135723 4.463405e-01
#> 4: 0.03285177 0.249315880 8.032195e-01
#> 5: 0.02934286 0.248209622 8.040747e-01
#> 6: 0.03282994 0.409401512 6.824214e-01
#> 7: 0.03278659 -0.958234047 3.384107e-01
#> 8: 0.03276977 -1.293286104 1.965129e-01
#> 9: 0.03285385 -0.076593003 9.389781e-01
#> 10: 0.03258817 -1.570778053 1.168706e-01
#> 11: 0.03279075 -0.715545467 4.746081e-01
#> 12: 0.03278837 -1.428605902 1.537457e-01
#> 13: 0.03285627 0.115438328 9.081443e-01
#> 14: 0.02760583 0.608693558 5.430057e-01
#> 15: 0.03281272 1.153480759 2.492675e-01
#> 16: 0.03242651 3.058040210 2.348000e-03
#> 17: 0.03275359 0.004297352 9.965729e-01
#> 18: 0.03281731 -0.220672756 8.254379e-01
#> 19: 0.03279362 0.612088742 5.407590e-01
#> 20: 0.03281238 -1.165422327 2.444071e-01
#> 21: 0.02211240 24.502267916 1.590332e-87
#> 22: 0.03285113 -0.370010788 7.115321e-01
#> 23: 0.03282172 -0.580484426 5.618511e-01
#> 24: 0.03284107 -0.632163519 5.275707e-01
#> 25: 0.03276632 -0.879658649 3.794694e-01
#> 26: 0.03285612 -0.183272635 8.546588e-01
#> 27: 0.03278921 1.275729529 2.026471e-01
#> 28: 0.03280133 0.931327163 3.521365e-01
#> 29: 0.03284987 -0.397310685 6.913089e-01
#> 30: 0.03257940 1.838257435 6.662107e-02
#> 31: 0.03284522 0.601434105 5.478251e-01
#> 32: 0.03273418 -0.029387194 9.765676e-01
#> 33: 0.03284601 -0.551184103 5.817552e-01
#> 34: 0.03282792 -0.928322953 3.536907e-01
#> 35: 0.03275616 -0.887525905 3.752251e-01
#> 36: 0.03268079 2.211697277 2.744178e-02
#> 37: 0.03274099 -1.233623971 2.179261e-01
#> 38: 0.03284173 0.631810704 5.278011e-01
#> 39: 0.03281720 0.491478616 6.233049e-01
#> 40: 0.03280817 -1.219170313 2.233579e-01
#> SE_x2 t_value_x2 p_value_x2
#> <num> <num> <num>
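Because one model is fit per outcome, the p-values printed above are unadjusted for multiple comparisons. A minimal follow-up sketch using base R's p.adjust(), assuming summary() returns a data.frame-like table with the column names shown above:

coefs <- summary(massmod)
# Benjamini-Hochberg FDR adjustment of the x1 p-values across all 40 outcomes.
p_adj_x1 <- p.adjust(coefs$p_value_x1, method = "BH")
coefs$Variable[p_adj_x1 < 0.05]  # outcomes associated with x1 after FDR correction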