CRAN Package Check Results for Package mboost

Last updated on 2019-03-22 19:48:52 CET.

Flavor                               Version  Tinstall  Tcheck  Ttotal  Status  Flags
r-devel-linux-x86_64-debian-clang    2.9-1       15.31  481.83  497.14  OK
r-devel-linux-x86_64-debian-gcc      2.9-1       13.47  356.87  370.34  OK
r-devel-linux-x86_64-fedora-clang    2.9-1                      602.82  OK
r-devel-linux-x86_64-fedora-gcc      2.9-1                      545.59  OK
r-devel-windows-ix86+x86_64          2.9-1       39.00  158.00  197.00  OK      --no-vignettes
r-patched-linux-x86_64               2.9-1       11.78  478.34  490.12  OK
r-patched-solaris-x86                2.9-1                      801.90  OK
r-release-linux-x86_64               2.9-1       11.49  480.05  491.54  OK
r-release-windows-ix86+x86_64        2.9-1       22.00  146.00  168.00  OK      --no-vignettes
r-release-osx-x86_64                 2.9-1                              OK
r-oldrel-windows-ix86+x86_64         2.9-1       18.00  169.00  187.00  ERROR   --no-vignettes
r-oldrel-osx-x86_64                  2.9-1                              OK

Check Details

Version: 2.9-1
Flags: --no-vignettes
Check: running examples for arch ‘i386’
Result: ERROR
    Running examples in 'mboost-Ex.R' failed
    The error most likely occurred in:
    
    > ### Name: baselearners
    > ### Title: Base-learners for Gradient Boosting
    > ### Aliases: baselearners baselearner base-learner bols bbs bspatial brad
    > ### bkernel brandom btree bmono bmrf buser bns bss %+% %X% %O%
    > ### Keywords: models
    >
    > ### ** Examples
    >
    >
    > set.seed(290875)
    >
    > n <- 100
    > x1 <- rnorm(n)
    > x2 <- rnorm(n) + 0.25 * x1
    > x3 <- as.factor(sample(0:1, 100, replace = TRUE))
    > x4 <- gl(4, 25)
    > y <- 3 * sin(x1) + x2^2 + rnorm(n)
    > weights <- drop(rmultinom(1, n, rep.int(1, n) / n))
    >
    > ### set up base-learners
    > spline1 <- bbs(x1, knots = 20, df = 4)
    > extract(spline1, "design")[1:10, 1:10]
     1 2 3 4 5 6 7 8 9
     [1,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
     [2,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
     [3,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
     [4,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
     [5,] 0 0 0 0 0.01490533 0.44554054 5.113987e-01 0.028155480 0.000000000
     [6,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
     [7,] 0 0 0 0 0.00000000 0.06481227 6.035695e-01 0.328334430 0.003283771
     [8,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
     [9,] 0 0 0 0 0.00000000 0.00000000 1.551197e-09 0.167720617 0.666662247
    [10,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.009196839 0.401902997
     10
     [1,] 0.0000000
     [2,] 0.0000000
     [3,] 0.0000000
     [4,] 0.0000000
     [5,] 0.0000000
     [6,] 0.0000000
     [7,] 0.0000000
     [8,] 0.0000000
     [9,] 0.1656171
    [10,] 0.5493155
    > extract(spline1, "penalty")
     [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [,11] [,12] [,13]
     [1,] 1 -2 1 0 0 0 0 0 0 0 0 0 0
     [2,] -2 5 -4 1 0 0 0 0 0 0 0 0 0
     [3,] 1 -4 6 -4 1 0 0 0 0 0 0 0 0
     [4,] 0 1 -4 6 -4 1 0 0 0 0 0 0 0
     [5,] 0 0 1 -4 6 -4 1 0 0 0 0 0 0
     [6,] 0 0 0 1 -4 6 -4 1 0 0 0 0 0
     [7,] 0 0 0 0 1 -4 6 -4 1 0 0 0 0
     [8,] 0 0 0 0 0 1 -4 6 -4 1 0 0 0
     [9,] 0 0 0 0 0 0 1 -4 6 -4 1 0 0
    [10,] 0 0 0 0 0 0 0 1 -4 6 -4 1 0
    [11,] 0 0 0 0 0 0 0 0 1 -4 6 -4 1
    [12,] 0 0 0 0 0 0 0 0 0 1 -4 6 -4
    [13,] 0 0 0 0 0 0 0 0 0 0 1 -4 6
    [14,] 0 0 0 0 0 0 0 0 0 0 0 1 -4
    [15,] 0 0 0 0 0 0 0 0 0 0 0 0 1
    [16,] 0 0 0 0 0 0 0 0 0 0 0 0 0
    [17,] 0 0 0 0 0 0 0 0 0 0 0 0 0
    [18,] 0 0 0 0 0 0 0 0 0 0 0 0 0
    [19,] 0 0 0 0 0 0 0 0 0 0 0 0 0
    [20,] 0 0 0 0 0 0 0 0 0 0 0 0 0
    [21,] 0 0 0 0 0 0 0 0 0 0 0 0 0
    [22,] 0 0 0 0 0 0 0 0 0 0 0 0 0
    [23,] 0 0 0 0 0 0 0 0 0 0 0 0 0
    [24,] 0 0 0 0 0 0 0 0 0 0 0 0 0
     [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24]
     [1,] 0 0 0 0 0 0 0 0 0 0 0
     [2,] 0 0 0 0 0 0 0 0 0 0 0
     [3,] 0 0 0 0 0 0 0 0 0 0 0
     [4,] 0 0 0 0 0 0 0 0 0 0 0
     [5,] 0 0 0 0 0 0 0 0 0 0 0
     [6,] 0 0 0 0 0 0 0 0 0 0 0
     [7,] 0 0 0 0 0 0 0 0 0 0 0
     [8,] 0 0 0 0 0 0 0 0 0 0 0
     [9,] 0 0 0 0 0 0 0 0 0 0 0
    [10,] 0 0 0 0 0 0 0 0 0 0 0
    [11,] 0 0 0 0 0 0 0 0 0 0 0
    [12,] 1 0 0 0 0 0 0 0 0 0 0
    [13,] -4 1 0 0 0 0 0 0 0 0 0
    [14,] 6 -4 1 0 0 0 0 0 0 0 0
    [15,] -4 6 -4 1 0 0 0 0 0 0 0
    [16,] 1 -4 6 -4 1 0 0 0 0 0 0
    [17,] 0 1 -4 6 -4 1 0 0 0 0 0
    [18,] 0 0 1 -4 6 -4 1 0 0 0 0
    [19,] 0 0 0 1 -4 6 -4 1 0 0 0
    [20,] 0 0 0 0 1 -4 6 -4 1 0 0
    [21,] 0 0 0 0 0 1 -4 6 -4 1 0
    [22,] 0 0 0 0 0 0 1 -4 6 -4 1
    [23,] 0 0 0 0 0 0 0 1 -4 5 -2
    [24,] 0 0 0 0 0 0 0 0 1 -2 1
    > knots.x2 <- quantile(x2, c(0.25, 0.5, 0.75))
    > spline2 <- bbs(x2, knots = knots.x2, df = 5)
    > ols3 <- bols(x3)
    > extract(ols3)
     (Intercept) x31
    1 1 0
    3 1 1
    attr(,"assign")
    [1] 0 1
    attr(,"contrasts")
    attr(,"contrasts")$x3
    [1] "contr.treatment"
    
    > ols4 <- bols(x4)
    >
    > ### compute base-models
    > drop(ols3$dpp(weights)$fit(y)$model) ## same as:
    (Intercept) x31
     1.094457 1.008338
    > coef(lm(y ~ x3, weights = weights))
    (Intercept) x31
     1.094457 1.008338
    >
    > drop(ols4$dpp(weights)$fit(y)$model) ## same as:
    (Intercept) x42 x43 x44
     0.9162875 0.3180593 0.8982705 0.8162401
    > coef(lm(y ~ x4, weights = weights))
    (Intercept) x42 x43 x44
     0.9162875 0.3180593 0.8982705 0.8162401
    >
    > ### fit model, component-wise
    > mod1 <- mboost_fit(list(spline1, spline2, ols3, ols4), y, weights)
    >
    > ### more convenient formula interface
    > mod2 <- mboost(y ~ bbs(x1, knots = 20, df = 4) +
    + bbs(x2, knots = knots.x2, df = 5) +
    + bols(x3) + bols(x4), weights = weights)
    > all.equal(coef(mod1), coef(mod2))
    [1] TRUE
    >
    >
    > ### grouped linear effects
    > # center x1 and x2 first
    > x1 <- scale(x1, center = TRUE, scale = FALSE)
    > x2 <- scale(x2, center = TRUE, scale = FALSE)
    > model <- gamboost(y ~ bols(x1, x2, intercept = FALSE) +
    + bols(x1, intercept = FALSE) +
    + bols(x2, intercept = FALSE),
    + control = boost_control(mstop = 50))
    > coef(model, which = 1) # one base-learner for x1 and x2
    $`bols(x1, x2, intercept = FALSE)`
     x1 x2
     1.81077137 -0.02249335
    
    attr(,"offset")
    [1] 1.334042
    > coef(model, which = 2:3) # two separate base-learners for x1 and x2
    $`bols(x1, intercept = FALSE)`
    x1
     0
    
    $`bols(x2, intercept = FALSE)`
    x2
     0
    
    attr(,"offset")
    [1] 1.334042
    > # zero because they were (not yet) selected.
    >
    > ### example for bspatial
    > x1 <- runif(250,-pi,pi)
    > x2 <- runif(250,-pi,pi)
    >
    > y <- sin(x1) * sin(x2) + rnorm(250, sd = 0.4)
    >
    > spline3 <- bspatial(x1, x2, knots = 12)
    > Xmat <- extract(spline3, "design")
    > ## 12 inner knots + 4 boundary knots = 16 knots per direction
    > ## THUS: 16 * 16 = 256 columns
    > dim(Xmat)
    [1] 250 256
    > extract(spline3, "penalty")[1:10, 1:10]
    10 x 10 sparse Matrix of class "dgTMatrix"
    Error in isFALSE(suppRows) : could not find function "isFALSE"
    Calls: <Anonymous> -> <Anonymous> -> printSpMatrix2
    Execution halted
Flavor: r-oldrel-windows-ix86+x86_64
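
Note on the error: isFALSE() was only added to base R in version 3.5.0, and r-oldrel at the time of this check (2019-03-22) was still the R 3.4 series. The examples stop when printing the sparse penalty matrix of the bspatial() base-learner: the sparse-matrix print routine printSpMatrix2() from the Matrix package calls isFALSE(suppRows), which does not exist on that R version. A minimal compatibility sketch, assuming a script only needs the strict single-value test that base R has used since 3.5.0 (this helper is not part of mboost or Matrix):

    ## Backport sketch: supply isFALSE() when running under R < 3.5.0,
    ## mirroring the strict test used by base R from 3.5.0 onward.
    if (getRversion() < "3.5.0") {
      isFALSE <- function(x) is.logical(x) && length(x) == 1L && !is.na(x) && !x
    }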

Version: 2.9-1
Flags: --no-vignettes
Check: running examples for arch ‘x64’
Result: ERROR
    Running examples in 'mboost-Ex.R' failed with output identical to the arch 'i386'
    check above: the run again stops at
    extract(spline3, "penalty")[1:10, 1:10]
    with
    Error in isFALSE(suppRows) : could not find function "isFALSE"
    Calls: <Anonymous> -> <Anonymous> -> printSpMatrix2
    Execution halted
Flavor: r-oldrel-windows-ix86+x86_64
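
For anyone reproducing this example on an affected R 3.4 installation, one possible workaround is sketched below: coerce the extracted penalty block to an ordinary dense matrix before printing, so the sparse "dgTMatrix" print method that triggers the missing isFALSE() is never reached. The object names match the example above; only the as.matrix() coercion is an addition.

    ## Workaround sketch (assumes an R 3.4 installation with mboost available):
    ## print the penalty block as a dense base matrix instead of letting the
    ## sparse-matrix print method run.
    library("mboost")
    x1 <- runif(250, -pi, pi)
    x2 <- runif(250, -pi, pi)
    spline3 <- bspatial(x1, x2, knots = 12)
    as.matrix(extract(spline3, "penalty")[1:10, 1:10])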