Run the Rats data model

rats_example(seed = 123, nCores = pmin(parallel::detectCores(), 4))

Arguments

seed

numeric, random seed passed to rstan. Default: 123

nCores

numeric, maximum number of cores to use. Default: pmin(parallel::detectCores(), 4)
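
For illustration, a minimal sketch of a call that overrides both arguments; the values below are arbitrary and only the signature shown above is assumed:

require(shredder)
# fix the seed and cap the sampler at two cores (illustrative values)
fit <- rats_example(seed = 2021, nCores = pmin(parallel::detectCores(), 2))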

Source

Data Description, Data and Script

Value

stanfit object
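
The returned object is a regular rstan stanfit, so the usual rstan accessors apply. A minimal sketch, assuming the fit from the Examples below has been assigned to rats:

class(rats)                                               # "stanfit"
draws <- rstan::extract(rats, pars = "mu_beta")$mu_beta   # posterior draws of mu_beta
mean(draws)                                               # posterior mean of the population-level slope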

See also

rstan, detectCores

Other examples: rat_data

Examples

# \donttest{
require(shredder)
rats <- rats_example(nCores = 1)
#> rstan (Version 2.21.2, GitRev: 2e1f913d3ca3)
#> For execution on a local, multicore CPU with excess RAM we recommend calling
#> options(mc.cores = parallel::detectCores()).
#> To avoid recompilation of unchanged Stan programs, we recommend calling
#> rstan_options(auto_write = TRUE)
#> 
#> SAMPLING FOR MODEL 'rats' NOW (CHAIN 1).
#> Chain 1: Gradient evaluation took 4.8e-05 seconds
#> Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.48 seconds.
#> Chain 1: Adjust your expectations accordingly!
#> Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
#> ...
#> Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
#> Chain 1: Elapsed Time: 0.447051 seconds (Warm-up)
#> Chain 1:               0.293003 seconds (Sampling)
#> Chain 1:               0.740054 seconds (Total)
#> 
#> SAMPLING FOR MODEL 'rats' NOW (CHAIN 2).
#> Chain 2: Gradient evaluation took 2e-05 seconds
#> Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.2 seconds.
#> Chain 2: Adjust your expectations accordingly!
#> Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
#> ...
#> Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
#> Chain 2: Elapsed Time: 0.545472 seconds (Warm-up)
#> Chain 2:               0.260851 seconds (Sampling)
#> Chain 2:               0.806323 seconds (Total)
#> 
#> SAMPLING FOR MODEL 'rats' NOW (CHAIN 3).
#> Chain 3: Gradient evaluation took 1.9e-05 seconds
#> Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.19 seconds.
#> Chain 3: Adjust your expectations accordingly!
#> Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
#> ...
#> Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
#> Chain 3: Elapsed Time: 0.857569 seconds (Warm-up)
#> Chain 3:               0.198415 seconds (Sampling)
#> Chain 3:               1.05598 seconds (Total)
#> 
#> SAMPLING FOR MODEL 'rats' NOW (CHAIN 4).
#> Chain 4: Gradient evaluation took 2.4e-05 seconds
#> Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.24 seconds.
#> Chain 4: Adjust your expectations accordingly!
#> Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
#> ...
#> Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
#> Chain 4: Elapsed Time: 0.860255 seconds (Warm-up)
#> Chain 4:               0.175961 seconds (Sampling)
#> Chain 4:               1.03622 seconds (Total)
rats
#> Inference for Stan model: rats.
#> 4 chains, each with iter=2000; warmup=1000; thin=1;
#> post-warmup draws per chain=1000, total post-warmup draws=4000.
#> 
#>                  mean se_mean    sd    2.5%     25%     50%     75%   97.5% n_eff Rhat
#> alpha[1]       239.93    0.03  2.61  234.75  238.22  239.91  241.71  244.96  6102    1
#> alpha[2]       247.80    0.04  2.70  242.54  245.97  247.79  249.62  253.07  5745    1
#> alpha[3]       252.44    0.04  2.60  247.27  250.72  252.46  254.21  257.55  4957    1
#> ...
#> beta[1]          6.06    0.00  0.24    5.59    5.91    6.07    6.22    6.53  5584    1
#> beta[2]          7.05    0.00  0.26    6.55    6.88    7.05    7.22    7.55  4936    1
#> beta[3]          6.48    0.00  0.24    6.02    6.32    6.48    6.65    6.97  4433    1
#> ...
#> mu_alpha       242.47    0.05  2.76  236.95  240.61  242.50  244.38  247.70  3585    1
#> mu_beta          6.19    0.00  0.11    5.98    6.12    6.19    6.25    6.40  4462    1
#> sigmasq_y       37.16    0.12  5.69   27.74   33.14   36.56   40.58   50.12  2366    1
#> sigmasq_alpha  218.39    1.06 63.89  126.08  173.31  208.62  251.30  372.24  3615    1
#> sigmasq_beta     0.27    0.00  0.10    0.13    0.21    0.26    0.32    0.52  3028    1
#> sigma_y          6.08    0.01  0.46    5.27    5.76    6.05    6.37    7.08  2370    1
#> sigma_alpha     14.63    0.03  2.07   11.23   13.16   14.44   15.85   19.29  3919    1
#> sigma_beta       0.52    0.00  0.09    0.36    0.45    0.51    0.57    0.72  2897    1
#> alpha0         106.39    0.06  3.60   99.23  104.00  106.44  108.76  113.55  4122    1
#> lp__          -437.92    0.21  7.04 -453.36 -442.36 -437.34 -432.93 -425.72  1098    1
#> 
#> Samples were drawn using NUTS(diag_e) at Mon Aug 31 11:07:31 2020.
#> For each parameter, n_eff is a crude measure of effective sample size,
#> and Rhat is the potential scale reduction factor on split chains (at
#> convergence, Rhat=1).
# }
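
Because the full print above lists all 30 alpha and beta terms, it can help to restrict the summary to the population-level parameters. This sketch uses the standard rstan print method for stanfit objects, not anything shredder-specific:

# print only the hyperparameters from the fit above
print(rats, pars = c("mu_alpha", "mu_beta", "sigma_y", "sigma_alpha", "sigma_beta"))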