Last updated on 2020-12-31 00:46:43 CET.
Flavor | Version | Tinstall (s) | Tcheck (s) | Ttotal (s) | Status | Flags |
---|---|---|---|---|---|---|
r-devel-linux-x86_64-debian-clang | 2.0-1.1 | 11.42 | 291.74 | 303.16 | ERROR | |
r-devel-linux-x86_64-debian-gcc | 2.0-1.1 | 9.54 | 215.13 | 224.67 | ERROR | |
r-devel-linux-x86_64-fedora-clang | 2.0-1.1 | | | 365.86 | ERROR | |
r-devel-linux-x86_64-fedora-gcc | 2.0-1.1 | | | 359.07 | ERROR | |
r-devel-windows-ix86+x86_64 | 2.0-1.1 | 23.00 | 369.00 | 392.00 | ERROR | |
r-patched-linux-x86_64 | 2.0-1.1 | 13.22 | 277.01 | 290.23 | OK | |
r-patched-solaris-x86 | 2.0-1.1 | | | 357.30 | OK | --no-vignettes |
r-release-linux-x86_64 | 2.0-1.1 | 11.17 | 274.58 | 285.75 | WARN | |
r-release-macos-x86_64 | 2.0-1.1 | | | | OK | |
r-release-windows-ix86+x86_64 | 2.0-1.1 | 23.00 | 274.00 | 297.00 | OK | |
r-oldrel-macos-x86_64 | 2.0-1.1 | | | | OK | |
r-oldrel-windows-ix86+x86_64 | 2.0-1.1 | 14.00 | 256.00 | 270.00 | OK | |
Version: 2.0-1.1
Check: tests
Result: ERROR
Running 'bugfixes.R' [5s/5s]
Running 'regtest-families.R' [10s/12s]
Running 'regtest-gamboostLSS.R' [9s/10s]
Running 'regtest-glmboostLSS.R' [7s/8s]
Running 'regtest-mstop.R' [5s/5s]
Running 'regtest-noncyclic_fitting.R' [14s/15s]
Running 'regtest-stabilization.R' [33s/36s]
Running 'regtest-stabsel.R' [11s/9s]
Running the tests in 'tests/regtest-noncyclic_fitting.R' failed.
Complete output:
> require("gamboostLSS")
Loading required package: gamboostLSS
Loading required package: mboost
Loading required package: parallel
Loading required package: stabs
Attaching package: 'gamboostLSS'
The following object is masked from 'package:stats':
model.weights
>
> ###negbin dist, linear###
>
> set.seed(2611)
> x1 <- rnorm(1000)
> x2 <- rnorm(1000)
> x3 <- rnorm(1000)
> x4 <- rnorm(1000)
> x5 <- rnorm(1000)
> x6 <- rnorm(1000)
> mu <- exp(1.5 + x1^2 +0.5 * x2 - 3 * sin(x3) -1 * x4)
> sigma <- exp(-0.2 * x4 +0.2 * x5 +0.4 * x6)
> y <- numeric(1000)
> for (i in 1:1000)
+ y[i] <- rnbinom(1, size = sigma[i], mu = mu[i])
> dat <- data.frame(x1, x2, x3, x4, x5, x6, y)
>
> #fit models at number of params + 1
>
> #glmboost
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
>
> #linear baselearner with bols
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bols")
>
> #nonlinear bbs baselearner
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bbs")
>
> #reducing model and increasing it afterwards should yield the same fit
>
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bols")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
> ## check cvrisk for noncyclic models
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
> cvr1 <- cvrisk(model, grid = 1:50, cv(model.weights(model), B = 5))
Starting cross-validation...
[Fold: 1]
[Fold: 2]
[ 1] ......[ 1] .................................................. -- risk: 1805.447
.[ 41] .................
Final risk: 1799.837
..
[Fold: 3]
......[ 1] ............. -- risk: 1734.54
[ 41] ...................
Final risk: 1732.471
...
[Fold: 4]
........[ 1] ................... -- risk: 1836.44
.[ 41] ................
Final risk: 1830.995
..
[Fold: 5]
......[ 1] ........................ -- risk: 1644.972
.[ 41] ...................
Final risk: 1643.654
....................... -- risk: 1748.201
[ 41] .........
Final risk: 1745.054
> cvr1
Cross-validated
glmboostLSS(formula = y ~ ., data = dat, families = NBinomialLSS(), control = boost_control(mstop = 3), method = "noncyclic")
1 2 3 4 5 6 7 8
4.857889 4.857889 4.854336 4.851760 4.848367 4.845487 4.843456 4.840692
9 10 11 12 13 14 15 16
4.837335 4.835769 4.833281 4.831312 4.829041 4.827176 4.824955 4.822376
17 18 19 20 21 22 23 24
4.820667 4.819740 4.817552 4.816011 4.813472 4.812703 4.810620 4.809356
25 26 27 28 29 30 31 32
4.807388 4.806713 4.804566 4.803307 4.801508 4.800541 4.799011 4.798048
33 34 35 36 37 38 39 40
4.796181 4.795305 4.793928 4.793380 4.791924 4.790579 4.789696 4.788866
41 42 43 44 45 46 47 48
4.787808 4.786352 4.785354 4.784043 4.783679 4.782704 4.781571 4.780256
49 50
4.779861 4.778943
Optimal number of boosting iterations: 50
> plot(cvr1)
>
> risk(model, merge = TRUE)
mu sigma sigma sigma mu
4755.327 4755.327 4752.028 4749.214 4746.600
> risk(model, merge = FALSE)
$mu
[1] 4755.327 4746.600
$sigma
[1] 4755.327 4752.028 4749.214
attr(,"class")
[1] "inbag"
>
>
> ## test that mstop = 0 is possible
> compare_models <- function (m1, m2) {
+ stopifnot(all.equal(coef(m1), coef(m2)))
+ stopifnot(all.equal(predict(m1), predict(m2)))
+ stopifnot(all.equal(fitted(m1), fitted(m2)))
+ stopifnot(all.equal(selected(m1), selected(m2)))
+ stopifnot(all.equal(risk(m1), risk(m2)))
+ ## remove obvious differences from objects
+ m1$control <- m2$control <- NULL
+ m1$call <- m2$call <- NULL
+ if (!all.equal(m1, m2))
+ stop("Objects of offset model + 1 step and model with 1 step not identical")
+ invisible(NULL)
+ }
>
> # set up models
> mod <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 0))
> mod2 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
> mod3 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
>
> lapply(coef(mod), function(x) stopifnot(is.null(x)))
$mu
NULL
$sigma
NULL
>
> mstop(mod3) <- 0
> mapply(compare_models, m1 = mod, m2 = mod3)
Error in !all.equal(m1, m2) : invalid argument type
Calls: mapply -> <Anonymous>
Execution halted
Flavor: r-devel-linux-x86_64-debian-clang
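Note: the ERROR on this flavor (and on the other ERROR flavors below, which fail at the same point) is triggered by the final comparison in the test helper `compare_models()`. When two objects are not equal, `all.equal()` returns a character vector describing the differences rather than `FALSE`, so `!all.equal(m1, m2)` stops with "invalid argument type". The following is a minimal sketch of a comparison helper that avoids this by routing the result through `isTRUE()`; it is an illustrative rewrite of the test code shown above, not the maintainers' actual fix.

```r
## Sketch: compare two gamboostLSS fits without negating all.equal() directly.
## all.equal() returns TRUE on (near) equality and a character vector of
## differences otherwise, so the result must be wrapped in isTRUE() before
## it can be used as a logical condition.
compare_models <- function(m1, m2) {
  stopifnot(isTRUE(all.equal(coef(m1), coef(m2))),
            isTRUE(all.equal(risk(m1), risk(m2))))
  ## drop components that legitimately differ between the two fits
  m1$control <- m2$control <- NULL
  m1$call <- m2$call <- NULL
  cmp <- all.equal(m1, m2)
  if (!isTRUE(cmp))
    stop("models differ: ", paste(cmp, collapse = "; "))
  invisible(NULL)
}
```

With this guard the helper would report the actual differences between the reduced-and-refitted model and the freshly fitted one instead of aborting with a type error.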
Version: 2.0-1.1
Check: tests
Result: ERROR
Running ‘bugfixes.R’ [3s/5s]
Running ‘regtest-families.R’ [7s/10s]
Running ‘regtest-gamboostLSS.R’ [6s/9s]
Running ‘regtest-glmboostLSS.R’ [5s/8s]
Running ‘regtest-mstop.R’ [3s/5s]
Running ‘regtest-noncyclic_fitting.R’ [10s/13s]
Running ‘regtest-stabilization.R’ [23s/31s]
Running ‘regtest-stabsel.R’ [8s/9s]
Running the tests in ‘tests/regtest-noncyclic_fitting.R’ failed.
Complete output:
> require("gamboostLSS")
Loading required package: gamboostLSS
Loading required package: mboost
Loading required package: parallel
Loading required package: stabs
Attaching package: 'gamboostLSS'
The following object is masked from 'package:stats':
model.weights
>
> ###negbin dist, linear###
>
> set.seed(2611)
> x1 <- rnorm(1000)
> x2 <- rnorm(1000)
> x3 <- rnorm(1000)
> x4 <- rnorm(1000)
> x5 <- rnorm(1000)
> x6 <- rnorm(1000)
> mu <- exp(1.5 + x1^2 +0.5 * x2 - 3 * sin(x3) -1 * x4)
> sigma <- exp(-0.2 * x4 +0.2 * x5 +0.4 * x6)
> y <- numeric(1000)
> for (i in 1:1000)
+ y[i] <- rnbinom(1, size = sigma[i], mu = mu[i])
> dat <- data.frame(x1, x2, x3, x4, x5, x6, y)
>
> #fit models at number of params + 1
>
> #glmboost
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
>
> #linear baselearner with bols
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bols")
>
> #nonlinear bbs baselearner
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bbs")
>
> #reducing model and increasing it afterwards should yield the same fit
>
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bols")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
> ## check cvrisk for noncyclic models
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
> cvr1 <- cvrisk(model, grid = 1:50, cv(model.weights(model), B = 5))
Starting cross-validation...
[Fold: 1]
[Fold: 2]
[ 1] .....[ 1] .......................................................................... -- risk: 1734.54
. -- risk: 1805.447
[ 41] .[ 41] ..............
Final risk: 1732.471
...
Final risk: 1799.837
[Fold: 3]
[Fold: 4]
[ 1] ....[ 1] ......................................................... -- risk: 1836.44
[ 41] ..............
Final risk: 1830.995
...
[Fold: 5]
....[ 1] ............. -- risk: 1644.972
[ 41] ............
Final risk: 1643.654
............................... -- risk: 1748.201
[ 41] .........
Final risk: 1745.054
> cvr1
Cross-validated
glmboostLSS(formula = y ~ ., data = dat, families = NBinomialLSS(), control = boost_control(mstop = 3), method = "noncyclic")
1 2 3 4 5 6 7 8
4.857889 4.857889 4.854336 4.851760 4.848367 4.845487 4.843456 4.840692
9 10 11 12 13 14 15 16
4.837335 4.835769 4.833281 4.831312 4.829041 4.827176 4.824955 4.822376
17 18 19 20 21 22 23 24
4.820667 4.819740 4.817552 4.816011 4.813472 4.812703 4.810620 4.809356
25 26 27 28 29 30 31 32
4.807388 4.806713 4.804566 4.803307 4.801508 4.800541 4.799011 4.798048
33 34 35 36 37 38 39 40
4.796181 4.795305 4.793928 4.793380 4.791924 4.790579 4.789696 4.788866
41 42 43 44 45 46 47 48
4.787808 4.786352 4.785354 4.784043 4.783679 4.782704 4.781571 4.780256
49 50
4.779861 4.778943
Optimal number of boosting iterations: 50
> plot(cvr1)
>
> risk(model, merge = TRUE)
mu sigma sigma sigma mu
4755.327 4755.327 4752.028 4749.214 4746.600
> risk(model, merge = FALSE)
$mu
[1] 4755.327 4746.600
$sigma
[1] 4755.327 4752.028 4749.214
attr(,"class")
[1] "inbag"
>
>
> ## test that mstop = 0 is possible
> compare_models <- function (m1, m2) {
+ stopifnot(all.equal(coef(m1), coef(m2)))
+ stopifnot(all.equal(predict(m1), predict(m2)))
+ stopifnot(all.equal(fitted(m1), fitted(m2)))
+ stopifnot(all.equal(selected(m1), selected(m2)))
+ stopifnot(all.equal(risk(m1), risk(m2)))
+ ## remove obvious differences from objects
+ m1$control <- m2$control <- NULL
+ m1$call <- m2$call <- NULL
+ if (!all.equal(m1, m2))
+ stop("Objects of offset model + 1 step and model with 1 step not identical")
+ invisible(NULL)
+ }
>
> # set up models
> mod <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 0))
> mod2 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
> mod3 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
>
> lapply(coef(mod), function(x) stopifnot(is.null(x)))
$mu
NULL
$sigma
NULL
>
> mstop(mod3) <- 0
> mapply(compare_models, m1 = mod, m2 = mod3)
Error in !all.equal(m1, m2) : invalid argument type
Calls: mapply -> <Anonymous>
Execution halted
Flavor: r-devel-linux-x86_64-debian-gcc
Version: 2.0-1.1
Check: tests
Result: ERROR
Running ‘bugfixes.R’
Running ‘regtest-families.R’ [12s/13s]
Running ‘regtest-gamboostLSS.R’ [11s/13s]
Running ‘regtest-glmboostLSS.R’
Running ‘regtest-mstop.R’
Running ‘regtest-noncyclic_fitting.R’ [18s/20s]
Running ‘regtest-stabilization.R’ [38s/45s]
Running ‘regtest-stabsel.R’ [12s/11s]
Running the tests in ‘tests/regtest-noncyclic_fitting.R’ failed.
Complete output:
> require("gamboostLSS")
Loading required package: gamboostLSS
Loading required package: mboost
Loading required package: parallel
Loading required package: stabs
Attaching package: 'gamboostLSS'
The following object is masked from 'package:stats':
model.weights
>
> ###negbin dist, linear###
>
> set.seed(2611)
> x1 <- rnorm(1000)
> x2 <- rnorm(1000)
> x3 <- rnorm(1000)
> x4 <- rnorm(1000)
> x5 <- rnorm(1000)
> x6 <- rnorm(1000)
> mu <- exp(1.5 + x1^2 +0.5 * x2 - 3 * sin(x3) -1 * x4)
> sigma <- exp(-0.2 * x4 +0.2 * x5 +0.4 * x6)
> y <- numeric(1000)
> for (i in 1:1000)
+ y[i] <- rnbinom(1, size = sigma[i], mu = mu[i])
> dat <- data.frame(x1, x2, x3, x4, x5, x6, y)
>
> #fit models at number of params + 1
>
> #glmboost
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
>
> #linear baselearner with bols
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bols")
>
> #nonlinear bbs baselearner
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bbs")
>
> #reducing model and increasing it afterwards should yield the same fit
>
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bols")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
> ## check cvrisk for noncyclic models
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
> cvr1 <- cvrisk(model, grid = 1:50, cv(model.weights(model), B = 5))
Starting cross-validation...
[Fold: 1]
[Fold: 2]
[ 1] .........[ 1] .............................................................. -- risk: 1805.447
[ 41] ................
Final risk: 1799.837
.. -- risk: 1734.54
[Fold: 3]
[ 41] ........[ 1] ...
Final risk: 1732.471
...
[Fold: 4]
....[ 1] .................................................. -- risk: 1836.44
.[ 41] .................
Final risk: 1830.995
..
[Fold: 5]
........[ 1] .... -- risk: 1644.972
.[ 41] ..............
Final risk: 1643.654
................................ -- risk: 1748.201
[ 41] .........
Final risk: 1745.054
> cvr1
Cross-validated
glmboostLSS(formula = y ~ ., data = dat, families = NBinomialLSS(), control = boost_control(mstop = 3), method = "noncyclic")
1 2 3 4 5 6 7 8
4.857889 4.857889 4.854336 4.851760 4.848367 4.845487 4.843456 4.840692
9 10 11 12 13 14 15 16
4.837335 4.835769 4.833281 4.831312 4.829041 4.827176 4.824955 4.822376
17 18 19 20 21 22 23 24
4.820667 4.819740 4.817552 4.816011 4.813472 4.812703 4.810620 4.809356
25 26 27 28 29 30 31 32
4.807388 4.806713 4.804566 4.803307 4.801508 4.800541 4.799011 4.798048
33 34 35 36 37 38 39 40
4.796181 4.795305 4.793928 4.793380 4.791924 4.790579 4.789696 4.788866
41 42 43 44 45 46 47 48
4.787808 4.786352 4.785354 4.784043 4.783679 4.782704 4.781571 4.780256
49 50
4.779861 4.778943
Optimal number of boosting iterations: 50
> plot(cvr1)
>
> risk(model, merge = TRUE)
mu sigma sigma sigma mu
4755.327 4755.327 4752.028 4749.214 4746.600
> risk(model, merge = FALSE)
$mu
[1] 4755.327 4746.600
$sigma
[1] 4755.327 4752.028 4749.214
attr(,"class")
[1] "inbag"
>
>
> ## test that mstop = 0 is possible
> compare_models <- function (m1, m2) {
+ stopifnot(all.equal(coef(m1), coef(m2)))
+ stopifnot(all.equal(predict(m1), predict(m2)))
+ stopifnot(all.equal(fitted(m1), fitted(m2)))
+ stopifnot(all.equal(selected(m1), selected(m2)))
+ stopifnot(all.equal(risk(m1), risk(m2)))
+ ## remove obvious differences from objects
+ m1$control <- m2$control <- NULL
+ m1$call <- m2$call <- NULL
+ if (!all.equal(m1, m2))
+ stop("Objects of offset model + 1 step and model with 1 step not identical")
+ invisible(NULL)
+ }
>
> # set up models
> mod <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 0))
> mod2 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
> mod3 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
>
> lapply(coef(mod), function(x) stopifnot(is.null(x)))
$mu
NULL
$sigma
NULL
>
> mstop(mod3) <- 0
> mapply(compare_models, m1 = mod, m2 = mod3)
Error in !all.equal(m1, m2) : invalid argument type
Calls: mapply -> <Anonymous>
Execution halted
Flavor: r-devel-linux-x86_64-fedora-clang
Version: 2.0-1.1
Check: tests
Result: ERROR
Running ‘bugfixes.R’
Running ‘regtest-families.R’ [12s/16s]
Running ‘regtest-gamboostLSS.R’ [10s/12s]
Running ‘regtest-glmboostLSS.R’
Running ‘regtest-mstop.R’
Running ‘regtest-noncyclic_fitting.R’ [16s/18s]
Running ‘regtest-stabilization.R’ [39s/45s]
Running ‘regtest-stabsel.R’ [12s/11s]
Running the tests in ‘tests/regtest-noncyclic_fitting.R’ failed.
Complete output:
> require("gamboostLSS")
Loading required package: gamboostLSS
Loading required package: mboost
Loading required package: parallel
Loading required package: stabs
Attaching package: 'gamboostLSS'
The following object is masked from 'package:stats':
model.weights
>
> ###negbin dist, linear###
>
> set.seed(2611)
> x1 <- rnorm(1000)
> x2 <- rnorm(1000)
> x3 <- rnorm(1000)
> x4 <- rnorm(1000)
> x5 <- rnorm(1000)
> x6 <- rnorm(1000)
> mu <- exp(1.5 + x1^2 +0.5 * x2 - 3 * sin(x3) -1 * x4)
> sigma <- exp(-0.2 * x4 +0.2 * x5 +0.4 * x6)
> y <- numeric(1000)
> for (i in 1:1000)
+ y[i] <- rnbinom(1, size = sigma[i], mu = mu[i])
> dat <- data.frame(x1, x2, x3, x4, x5, x6, y)
>
> #fit models at number of params + 1
>
> #glmboost
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
>
> #linear baselearner with bols
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bols")
>
> #nonlinear bbs baselearner
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bbs")
>
> #reducing model and increasing it afterwards should yield the same fit
>
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bols")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
> ## check cvrisk for noncyclic models
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
> cvr1 <- cvrisk(model, grid = 1:50, cv(model.weights(model), B = 5))
Starting cross-validation...
[Fold: 1]
[Fold: 2]
[ 1] ..........[ 1] ................................................... -- risk: 1805.447
[ 41] ...............
Final risk: 1799.837
...
[Fold: 3]
.......... -- risk: 1734.54
[ 1] .[ 41] ....................
Final risk: 1732.471
...
[Fold: 4]
........[ 1] .................................... -- risk: 1836.44
[ 41] .............
Final risk: 1830.995
..
[Fold: 5]
.....[ 1] ...................... -- risk: 1644.972
.[ 41] ..................
Final risk: 1643.654
.................. -- risk: 1748.201
[ 41] .........
Final risk: 1745.054
> cvr1
Cross-validated
glmboostLSS(formula = y ~ ., data = dat, families = NBinomialLSS(), control = boost_control(mstop = 3), method = "noncyclic")
1 2 3 4 5 6 7 8
4.857889 4.857889 4.854336 4.851760 4.848367 4.845487 4.843456 4.840692
9 10 11 12 13 14 15 16
4.837335 4.835769 4.833281 4.831312 4.829041 4.827176 4.824955 4.822376
17 18 19 20 21 22 23 24
4.820667 4.819740 4.817552 4.816011 4.813472 4.812703 4.810620 4.809356
25 26 27 28 29 30 31 32
4.807388 4.806713 4.804566 4.803307 4.801508 4.800541 4.799011 4.798048
33 34 35 36 37 38 39 40
4.796181 4.795305 4.793928 4.793380 4.791924 4.790579 4.789696 4.788866
41 42 43 44 45 46 47 48
4.787808 4.786352 4.785354 4.784043 4.783679 4.782704 4.781571 4.780256
49 50
4.779861 4.778943
Optimal number of boosting iterations: 50
> plot(cvr1)
>
> risk(model, merge = TRUE)
mu sigma sigma sigma mu
4755.327 4755.327 4752.028 4749.214 4746.600
> risk(model, merge = FALSE)
$mu
[1] 4755.327 4746.600
$sigma
[1] 4755.327 4752.028 4749.214
attr(,"class")
[1] "inbag"
>
>
> ## test that mstop = 0 is possible
> compare_models <- function (m1, m2) {
+ stopifnot(all.equal(coef(m1), coef(m2)))
+ stopifnot(all.equal(predict(m1), predict(m2)))
+ stopifnot(all.equal(fitted(m1), fitted(m2)))
+ stopifnot(all.equal(selected(m1), selected(m2)))
+ stopifnot(all.equal(risk(m1), risk(m2)))
+ ## remove obvious differences from objects
+ m1$control <- m2$control <- NULL
+ m1$call <- m2$call <- NULL
+ if (!all.equal(m1, m2))
+ stop("Objects of offset model + 1 step and model with 1 step not identical")
+ invisible(NULL)
+ }
>
> # set up models
> mod <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 0))
> mod2 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
> mod3 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
>
> lapply(coef(mod), function(x) stopifnot(is.null(x)))
$mu
NULL
$sigma
NULL
>
> mstop(mod3) <- 0
> mapply(compare_models, m1 = mod, m2 = mod3)
Error in !all.equal(m1, m2) : invalid argument type
Calls: mapply -> <Anonymous>
Execution halted
Flavor: r-devel-linux-x86_64-fedora-gcc
Version: 2.0-1.1
Check: tests
Result: ERROR
Running 'bugfixes.R' [5s]
Running 'regtest-families.R' [15s]
Running 'regtest-gamboostLSS.R' [11s]
Running 'regtest-glmboostLSS.R' [10s]
Running 'regtest-mstop.R' [6s]
Running 'regtest-noncyclic_fitting.R' [18s]
Running 'regtest-stabilization.R' [46s]
Running 'regtest-stabsel.R' [10s]
Running the tests in 'tests/regtest-noncyclic_fitting.R' failed.
Complete output:
> require("gamboostLSS")
Loading required package: gamboostLSS
Loading required package: mboost
Loading required package: parallel
Loading required package: stabs
Attaching package: 'gamboostLSS'
The following object is masked from 'package:stats':
model.weights
>
> ###negbin dist, linear###
>
> set.seed(2611)
> x1 <- rnorm(1000)
> x2 <- rnorm(1000)
> x3 <- rnorm(1000)
> x4 <- rnorm(1000)
> x5 <- rnorm(1000)
> x6 <- rnorm(1000)
> mu <- exp(1.5 + x1^2 +0.5 * x2 - 3 * sin(x3) -1 * x4)
> sigma <- exp(-0.2 * x4 +0.2 * x5 +0.4 * x6)
> y <- numeric(1000)
> for (i in 1:1000)
+ y[i] <- rnbinom(1, size = sigma[i], mu = mu[i])
> dat <- data.frame(x1, x2, x3, x4, x5, x6, y)
>
> #fit models at number of params + 1
>
> #glmboost
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
>
> #linear baselearner with bols
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bols")
>
> #nonlinear bbs baselearner
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic",
+ baselearner = "bbs")
>
> #reducing model and increasing it afterwards should yield the same fit
>
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bols")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
>
> model <- gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 50), method = "noncyclic",
+ baselearner = "bbs")
>
> m_co <- coef(model)
>
> mstop(model) <- 5
> mstop(model) <- 50
>
> stopifnot(all.equal(m_co, coef(model)))
>
> ## check cvrisk for noncyclic models
> model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
+ control = boost_control(mstop = 3), method = "noncyclic")
> cvr1 <- cvrisk(model, grid = 1:50, cv(model.weights(model), B = 5))
Starting cross-validation...
[Fold: 1]
[ 1] ........................................ -- risk: 1805.447
[ 41] .........
Final risk: 1799.837
[Fold: 2]
[ 1] ........................................ -- risk: 1734.54
[ 41] .........
Final risk: 1732.471
[Fold: 3]
[ 1] ........................................ -- risk: 1836.44
[ 41] .........
Final risk: 1830.995
[Fold: 4]
[ 1] ........................................ -- risk: 1644.972
[ 41] .........
Final risk: 1643.654
[Fold: 5]
[ 1] ........................................ -- risk: 1748.201
[ 41] .........
Final risk: 1745.054
> cvr1
Cross-validated
glmboostLSS(formula = y ~ ., data = dat, families = NBinomialLSS(), control = boost_control(mstop = 3), method = "noncyclic")
1 2 3 4 5 6 7 8
4.857889 4.857889 4.854336 4.851760 4.848367 4.845487 4.843456 4.840692
9 10 11 12 13 14 15 16
4.837335 4.835769 4.833281 4.831312 4.829041 4.827176 4.824955 4.822376
17 18 19 20 21 22 23 24
4.820667 4.819740 4.817552 4.816011 4.813472 4.812703 4.810620 4.809356
25 26 27 28 29 30 31 32
4.807388 4.806713 4.804566 4.803307 4.801508 4.800541 4.799011 4.798048
33 34 35 36 37 38 39 40
4.796181 4.795305 4.793928 4.793380 4.791924 4.790579 4.789696 4.788866
41 42 43 44 45 46 47 48
4.787808 4.786352 4.785354 4.784043 4.783679 4.782704 4.781571 4.780256
49 50
4.779861 4.778943
Optimal number of boosting iterations: 50
> plot(cvr1)
>
> risk(model, merge = TRUE)
mu sigma sigma mu mu sigma mu mu
1770.175 1770.175 1768.441 1768.050 1767.677 1766.076 1765.724 1765.388
sigma mu mu sigma mu mu sigma sigma
1763.912 1763.594 1763.292 1761.933 1761.646 1761.373 1760.615 1759.371
mu mu mu sigma sigma mu mu sigma
1759.107 1758.855 1758.614 1757.906 1756.754 1756.521 1756.299 1755.650
mu mu sigma sigma mu mu mu sigma
1755.432 1755.224 1754.151 1753.555 1753.354 1753.162 1752.977 1751.993
sigma mu mu mu sigma sigma mu mu
1751.437 1751.259 1751.089 1750.925 1750.025 1749.507 1749.349 1749.182
mu sigma sigma mu mu mu sigma sigma
1749.036 1748.201 1747.724 1747.584 1747.450 1747.321 1746.559 1746.116
mu mu mu sigma
1745.992 1745.873 1745.759 1745.054
> risk(model, merge = FALSE)
$mu
[1] 4755.327 4746.600
$sigma
[1] 4755.327 4752.028 4749.214
attr(,"class")
[1] "inbag"
>
>
> ## test that mstop = 0 is possible
> compare_models <- function (m1, m2) {
+ stopifnot(all.equal(coef(m1), coef(m2)))
+ stopifnot(all.equal(predict(m1), predict(m2)))
+ stopifnot(all.equal(fitted(m1), fitted(m2)))
+ stopifnot(all.equal(selected(m1), selected(m2)))
+ stopifnot(all.equal(risk(m1), risk(m2)))
+ ## remove obvious differences from objects
+ m1$control <- m2$control <- NULL
+ m1$call <- m2$call <- NULL
+ if (!all.equal(m1, m2))
+ stop("Objects of offset model + 1 step and model with 1 step not identical")
+ invisible(NULL)
+ }
>
> # set up models
> mod <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 0))
> mod2 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
> mod3 <- glmboostLSS(y ~ ., data = dat, method = "noncyclic", control = boost_control(mstop = 1))
>
> lapply(coef(mod), function(x) stopifnot(is.null(x)))
$mu
NULL
$sigma
NULL
>
> mstop(mod3) <- 0
> mapply(compare_models, m1 = mod, m2 = mod3)
Error in !all.equal(m1, m2) : invalid argument type
Calls: mapply -> <Anonymous>
Execution halted
Flavor: r-devel-windows-ix86+x86_64
Version: 2.0-1.1
Check: re-building of vignette outputs
Result: WARN
Error(s) in re-building vignettes:
...
--- re-building ‘gamboostLSS_Tutorial.Rnw’ using Sweave
Loading required package: R2BayesX
Loading required package: BayesXsrc
Loading required package: colorspace
Loading required package: mgcv
Loading required package: nlme
This is mgcv 1.8-33. For overview type 'help("mgcv-package")'.
Loading required package: gamboostLSS
Loading required package: mboost
Loading required package: parallel
Loading required package: stabs
Attaching package: 'gamboostLSS'
The following object is masked from 'package:stats':
model.weights
Loading required namespace: BayesX
Error: processing vignette 'gamboostLSS_Tutorial.Rnw' failed with diagnostics:
Running ‘texi2dvi’ on ‘gamboostLSS_Tutorial.tex’ failed.
LaTeX errors:
! Undefined control sequence.
<recently read> \Hy@colorlink
l.107 \begin{document}
! Undefined control sequence.
\close@pdflink ->\Hy@endcolorlink
\Hy@VerboseLinkStop \pdfendlink
l.107 \begin{document}
! Undefined control sequence.
<recently read> \Hy@colorlink
l.107 \begin{document}
! Undefined control sequence.
\close@pdflink ->\Hy@endcolorlink
\Hy@VerboseLinkStop \pdfendlink
l.107 \begin{document}
! Undefined control sequence.
\hyper@linkurl ...tionraw >>}\relax \Hy@colorlink
\@urlcolor #1\Hy@xspace@en...
l.107 \begin{document}
! Undefined control sequence.
\close@pdflink ->\Hy@endcolorlink
\Hy@VerboseLinkStop \pdfendlink
l.107 \begin{document}
--- failed re-building 'gamboostLSS_Tutorial.Rnw'
SUMMARY: processing the following file failed:
'gamboostLSS_Tutorial.Rnw'
Error: Vignette re-building failed.
Execution halted
Flavor: r-release-linux-x86_64
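Note: the WARN on r-release-linux-x86_64 is unrelated to the test failure above. Sweave processes the tutorial vignette, but the subsequent `texi2dvi` run stops on undefined hyperref macros (`\Hy@colorlink`, `\Hy@endcolorlink`), which usually points to the hyperref setup of the TeX installation on the check machine rather than to the vignette source. A hedged sketch of how the rebuild could be reproduced locally to rule out the vignette itself; the working directory and file paths are assumptions for illustration.

```r
## Rebuild the vignettes from an unpacked gamboostLSS source tree
## (assumed to be the current working directory). buildVignettes()
## runs Sweave and then the LaTeX step, mirroring what "R CMD check"
## does during vignette re-building.
tools::buildVignettes(dir = ".", quiet = FALSE)

## Alternatively, run the two stages by hand on the tutorial vignette:
Sweave("vignettes/gamboostLSS_Tutorial.Rnw")   # writes gamboostLSS_Tutorial.tex
tools::texi2pdf("gamboostLSS_Tutorial.tex",    # the step that failed on CRAN
                quiet = FALSE)
```

If the local rebuild succeeds with an up-to-date TeX Live, that would suggest the LaTeX errors stem from the check machine's TeX installation rather than from the vignette.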