library(tidyverse)
library(cowplot)
library(brms)
library(tidybayes)
library(patchwork)
library(here)
sppnames <- c( "afarensis","africanus","habilis","boisei",
"rudolfensis","ergaster","sapiens")
brainvolcc <- c( 438 , 452 , 612, 521, 752, 871, 1350 )
masskg <- c( 37.0 , 35.5 , 34.5 , 41.5 , 55.5 , 61.0 , 53.5 )
d <- data.frame( species=sppnames , brain=brainvolcc , mass=masskg )
rethinking::precis(d)
##              mean        sd   5.5%    94.5%      histogram
## species       NaN        NA     NA       NA
## brain    713.7143 322.24769 442.62 1191.930     ▇▃▃▃▃▁▁▁▁▃
## mass      45.5000  10.90489  34.83   59.185 ▇▃▁▃▁▁▁▁▁▃▃▁▁▃
d <- d %>%
mutate(across(c(mass, brain),
rethinking::standardize,
.names = "{.col}_std"))
m1 <- brm(
data=d,
family=gaussian,
brain_std ~ mass_std,
prior = c( prior(normal(.5, 1), class=Intercept),
prior(normal(0, 10), class=b),
prior(exponential(1), class=sigma)),
iter=2000, warmup=1000, seed=5, chains=1,
file=here("files/models/41.1"))
posterior_summary(m1)
##               Estimate Est.Error        Q2.5      Q97.5
## b_Intercept  0.5290686 0.1033275  0.30007396  0.7083120
## b_mass_std   0.1689096 0.1103162 -0.06450775  0.3912348
## sigma        0.2506617 0.1162559  0.12134343  0.5376844
## Intercept    0.5290686 0.1033275  0.30007396  0.7083120
## lprior      -4.3970827 0.1223031 -4.70870685 -4.2634509
## lp__        -5.1619229 1.6177241 -9.34957367 -3.2702676
m2 <- brm(
data=d,
family=gaussian,
brain_std ~ mass_std + I(mass_std^2) + I(mass_std^3) + I(mass_std^4) + I(mass_std^5),
prior = c( prior(normal(.5, 1), class=Intercept),
prior(normal(0, 10), class=b),
prior(exponential(1), class=sigma)),
iter=2000, warmup=1000, seed=5, chains=1,
file=here("files/models/41.2"))
posterior_summary(m2)
##                   Estimate Est.Error        Q2.5       Q97.5
## b_Intercept     1.33862316 1.2783977  -1.7216173   3.6480030
## b_mass_std      5.15242364 2.7562205  -0.9873790   9.6166649
## b_Imass_stdE2  -2.20115118 3.4695255  -8.2777789   5.6728162
## b_Imass_stdE3  -7.71302916 5.7220566 -16.6542003   5.3668222
## b_Imass_stdE4   0.63293350 2.5803001  -4.5253165   5.8936511
## b_Imass_stdE5   2.73146232 2.8412008  -3.9354286   7.5118611
## sigma           0.69638151 0.4214775   0.2882512   1.7543063
## Intercept       0.03600954 0.2826595  -0.4724095   0.6651902
## lprior        -18.69941199 0.7452101 -20.5710238 -17.7516872
## lp__          -25.18150004 3.6189201 -33.1389107 -19.0816838
post1 = as_draws_df(m1) # get posterior distribution
avg_line = colMeans(post1)
p1 <- d %>%
ggplot( aes(x=mass_std, y=brain_std) ) +
geom_point() +
geom_abline( aes(intercept=b_Intercept, slope=b_mass_std),
data=post1[1:20, ],
alpha=.2) +
geom_abline( intercept=avg_line["b_Intercept"], slope=avg_line["b_mass_std"],
color = "#1c5253") +
lims(y=c(-3,6)) +
labs( x="mass (standardized)",
y="brain size (standardized)",
title="m1")
nd = data.frame(mass_std = seq(min(d$mass_std), max(d$mass_std), length.out=1000))
post2 = add_epred_draws(m2, newdata = nd) # get predicted vals (expected means)
avg_line = post2 %>% with_groups(mass_std, summarise, m = mean(.epred))
p2 <- d %>%
ggplot( aes(x=mass_std, y=brain_std) ) +
geom_point() +
geom_line( aes(y=.epred, group=.draw),
data=filter(post2, .draw <= 20),
color = "black",
alpha=.2) +
geom_smooth( aes(y=m), data=avg_line, se=F, colour="#1c5253") +
lims(y=c(-3,6)) +
labs( x="mass (standardized)",
y="brain size (standardized)",
title="m2")
p1 + p2
## `geom_smooth()` using method = 'gam' and formula = 'y ~ s(x, bs = "cs")'
A benefit of Bayesian methods over frequentist ones is the ability to regularize your coefficient estimates. What follows is a pretty extreme example of setting a skeptical prior, but it demonstrates the point. Of course, we don't want our priors to be too skeptical, or we'll miss important (and regular) information.
m2 <- brm(
data=d,
family=gaussian,
brain_std ~ mass_std + I(mass_std^2) + I(mass_std^3) +
I(mass_std^4) + I(mass_std^5),
prior = c( prior(normal(.5, 1), class=Intercept),
prior(normal(0, 10), class=b),
prior(exponential(1), class=sigma)),
iter=2000, warmup=1000, seed=5, chains=1,
file=here("files/models/41.2"))
m2r <- brm(
data=d,
family=gaussian,
brain_std ~ mass_std + I(mass_std^2) + I(mass_std^3) +
I(mass_std^4) + I(mass_std^5),
prior = c( prior(normal(.5, 1), class=Intercept),
prior(normal(0, 1), class=b),
prior(exponential(1), class=sigma)),
iter=2000, warmup=1000, seed=5, chains=1,
file=here("files/models/41.2r"))
nd = data.frame(mass_std = seq(min(d$mass_std), max(d$mass_std), length.out=1000))
post2 = add_epred_draws(m2, newdata = nd) # get predicted vals (expected means)
avg_line = post2 %>% with_groups(mass_std, summarise, m = mean(.epred))
p2 <- d %>%
ggplot( aes(x=mass_std, y=brain_std) ) +
geom_point() +
geom_line( aes(y=.epred, group=.draw),
data=filter(post2, .draw <= 20),
color = "black",
alpha=.2) +
geom_smooth( aes(y=m), data=avg_line, se=F, colour="#1c5253") +
lims(y=c(-3,6)) +
labs( x="mass (standardized)",
y="brain size (standardized)",
title="m2")
post2r = add_epred_draws(m2r, newdata = nd) # get predicted vals (expected means)
avg_liner = post2r %>% with_groups(mass_std, summarise, m = mean(.epred))
p2r <- d %>%
ggplot( aes(x=mass_std, y=brain_std) ) +
geom_point() +
geom_line( aes(y=.epred, group=.draw),
data=filter(post2r, .draw <= 20),
color = "black",
alpha=.2) +
geom_smooth( aes(y=m), data=avg_liner, se=F, colour="#1c5253") +
lims(y=c(-3,6)) +
labs( x="mass (standardized)",
y="brain size (standardized)",
title="m2 - regularized")
p2 + p2r
## `geom_smooth()` using method = 'gam' and formula = 'y ~ s(x, bs = "cs")'
## `geom_smooth()` using method = 'gam' and formula = 'y ~ s(x, bs = "cs")'
Another option is to compare the out-of-sample prediction of models using cross validation. This will not change the estimates of your coefficients, but it will help you compare models. For prediction only!
The loo package gives us access to the loo() function, which stands for Leave-One-Out. Behind the scenes, this function uses PSIS (Pareto-smoothed importance sampling), which doesn't actually compute the cross-validation but rather estimates it.
library(loo)
## This is loo version 2.8.0
## - Online documentation and vignettes at mc-stan.org/loo
## - As of v2.0.0 loo defaults to 1 core but we recommend using as many as possible. Use the 'cores' argument or set options(mc.cores = NUM_CORES) for an entire session.
loo(m1)
## 
## Computed from 1000 by 7 log-likelihood matrix.
## 
##          Estimate  SE
## elpd_loo     -1.2 2.3
## p_loo         2.9 1.3
## looic         2.5 4.7
## ------
## MCSE of elpd_loo is NA.
## MCSE and ESS estimates assume MCMC draws (r_eff in [0.3, 0.6]).
## 
## Pareto k diagnostic values:
##                          Count Pct.  Min. ESS
## (-Inf, 0.67]  (good)     6     85.7% 201
##    (0.67, 1]  (bad)      1     14.3% <NA>
##     (1, Inf)  (very bad) 0      0.0% <NA>
## See help('pareto-k-diagnostic') for details.
Pointwise values of PSIS are reliable when k is less than .5, ok when k is between .5 and .7, bad when k is between .7 and 1, and very bad when k is greater than 1.
- elpd_loo: log of out-of-sample accuracy; higher is better.
- p_loo: estimate of model complexity; bigger = more complex.
- looic: -2 * elpd_loo, or the deviance; lower is better.
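As a quick check on these definitions, we can confirm that looic is just -2 times elpd_loo, and pull out the raw Pareto k values behind the diagnostic table (a sketch; in loo 2.x the returned object stores its summary in the estimates matrix and the k values in the diagnostics list):

l1 <- loo(m1)
-2 * l1$estimates["elpd_loo", "Estimate"]  # reproduces looic (up to rounding in the printout)
l1$estimates["looic", "Estimate"]
l1$diagnostics$pareto_k                    # one k value per observation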
Another option is the Widely Applicable Information Criterion (WAIC).
m1_waic = waic(m1)
m1_waic
## 
## Computed from 1000 by 7 log-likelihood matrix.
## 
##           Estimate  SE
## elpd_waic     -0.8 2.0
## p_waic         2.4 1.0
## waic           1.6 3.9
## 
## 1 (14.3%) p_waic estimates greater than 0.4. We recommend trying loo instead.
m1_waic$pointwise
##        elpd_waic    p_waic       waic
## [1,]  0.24961896 0.1811117 -0.4992379
## [2,]  0.27164921 0.1844870 -0.5432984
## [3,]  0.14890179 0.2109195 -0.2978036
## [4,]  0.29267886 0.1513239 -0.5853577
## [5,]  0.08540521 0.2116338 -0.1708104
## [6,] -0.08791516 0.3422239  0.1758303
## [7,] -1.77251166 1.1547520  3.5450233
The analogous quantities appear on the WAIC scale:

- elpd_waic: log of out-of-sample accuracy; higher is better.
- p_waic: estimate of model complexity; bigger = more complex.
- waic: -2 * elpd_waic, or the deviance; lower is better.
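To demystify these quantities, here is a sketch of computing WAIC by hand from the log-likelihood matrix: lppd is the log of the average (over posterior draws) likelihood of each observation, and p_waic is the variance of each observation's log-likelihood across draws. The total should be close to the waic(m1) output above.

ll <- log_lik(m1)               # 1000 draws x 7 observations
lppd <- log(colMeans(exp(ll)))  # log pointwise predictive density
p_waic <- apply(ll, 2, var)     # pointwise penalty (effective parameters)
-2 * sum(lppd - p_waic)         # WAIC, on the deviance scale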
m1 <- add_criterion(m1, criterion = c("loo", "waic"))
m2 <- add_criterion(m2, criterion = c("loo", "waic"))
m2r <- add_criterion(m2r, criterion = c("loo", "waic"))
loo_compare( m1, m2, m2r, criterion="loo")
##     elpd_diff se_diff
## m1        0.0     0.0
## m2       -8.1     2.0
## m2r     -10.4     0.8
loo_compare( m1, m2, m2r, criterion="waic")
##     elpd_diff se_diff
## m1        0.0     0.0
## m2       -7.8     1.8
## m2r     -10.2     0.7
The filled points are the in-sample deviance values. The open points are the WAIC values. Notice that naturally each model does better in-sample than it is expected to do out-of-sample. The line segments show the standard error of each WAIC. These are the values in the column labeled SE in the table above.
A central tension in our modeling is the one between explanation – good causal models – and prediction. In McElreath’s lecture, he leads us to the intuition that predictive models are generally those that do a terrible job of representing the causal model. So the tools covered in this lecture should be considered tools for prediction, but not for identifying causal models.
When trying to maximize prediction, we need to be wary of OVERFITTING – when the model learns too much from the sample. Methods for avoiding overfitting favor simpler models. However, we must also be wary of UNDERFITTING or learning too little.
There are two common families of approaches:

- Use of a REGULARIZING prior, which helps stop the model from becoming too excited about any one data point.
- Use of a scoring device, like INFORMATION CRITERIA and CROSS-VALIDATION, to estimate predictive accuracy.
base = d %>%
ggplot(aes(x=masskg, y=brainvolcc)) +
geom_point() +
geom_text(aes(label=sppnames), hjust=0, nudge_x = 1) +
labs(x="body mass (kg)", y="brain volume (cc)")
p1 = base + geom_smooth(method='lm', se =F) +ggtitle("Simple linear model")
p2 = base + geom_smooth(method='lm', se =F, formula=y~poly(x, 6)) +ggtitle("6th degree polynomial")
(p1 | p2)
## `geom_smooth()` using formula = 'y ~ x'
If your goal is simply prediction, not (causal) explanation, why not simply add as many variables into the model as possible?
Models = data compression. When there are as many parameters as data points, we haven't compressed the data at all; we've just encoded the raw data in a different form.
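To see the point concretely, here's a sketch: with seven species, a 6th-degree polynomial has seven coefficients (counting the intercept), so the fit passes through every point exactly.

# 7 data points, 7 parameters: a "perfect" fit that compresses nothing
fit <- lm(brain ~ poly(mass, 6), data = d)
summary(fit)$r.squared  # essentially 1: the raw data, re-encoded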
# Set up plotting area
plot(0, 0, type = "n", xlim = c(-3, 3), ylim = c(0, 2),
xlab = "parameter value", ylab = "Density",
main = "")
# Create x-values for plotting
x <- seq(-3, 3, length.out = 1000)
# Generate three density curves with different spreads
# Thick curve - narrowest spread (most peaked)
y1 <- dnorm(x, mean = 0, sd = 0.3)
# Scale to match the peak height in the original
y1 <- y1 * (2/max(y1))
# Medium curve - moderate spread
y2 <- dnorm(x, mean = 0, sd = 0.6)
# Scale to match the peak height in the original
y2 <- y2 * (0.8/max(y2))
# Dashed curve - widest spread (standard normal)
y3 <- dnorm(x, mean = 0, sd = 1)
# Scale to match the peak height in the original
y3 <- y3 * (0.4/max(y3))
# Add the curves to the plot
lines(x, y1, lwd = 3)
lines(x, y2, lwd = 1)
lines(x, y3, lwd = 1, lty = 2)
One tool in our toolbelt is regularizing priors. REGULARIZATION is a means by which you prevent the model from being “too excited” by the training sample, or to fit too closely to the specific patterns in that sample. There are many tools for regularization (ridge regression, lasso regression), but they all have the effect of downweighting regression parameters towards 0.
As a Bayesian, you also have the tool of regularizing priors. As your priors become more “skeptical” (usually, closer to 0), your model will adapt less to the data. Be wary of having priors that are too tight, because you risk underfitting. (Of course, the more data you have, the less influence your priors have. But that shouldn’t concern you, because overfitting is less of a concern with larger datasets.)
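Before plotting, we can inspect the shrinkage directly by comparing the posterior means of the coefficients under the two priors (a sketch using brms's fixef(); the tighter normal(0, 1) prior should pull the polynomial terms toward 0):

fixef(m2)[, "Estimate"]   # coefficients under the wide normal(0, 10) prior
fixef(m2r)[, "Estimate"]  # coefficients under the tighter normal(0, 1) prior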
nd = data.frame(mass_std = seq(min(d$mass_std), max(d$mass_std), length.out=100))
pred_2 = add_epred_draws(nd, m2) %>% filter(.draw <= 20)
pred_2r = add_epred_draws(nd, m2r) %>% filter(.draw <= 20)
p1 = pred_2 %>% ggplot(aes(x = mass_std, y=.epred)) +
geom_line(aes(group=.draw), alpha=.3) +
geom_point(aes(y=brain_std), data=d, color = "#1c5253") +
lims(y=c(-2,4)) +
labs(y="Brain size (standardized)")
p2 = pred_2r %>% ggplot(aes(x = mass_std, y=.epred)) +
geom_line(aes(group=.draw), alpha=.3) +
geom_point(aes(y=brain_std), data=d, color = "#1c5253") +
lims(y=c(-2,4)) +
labs(y="Brain size (standardized)")
p1 + p2
## Warning: Removed 5 rows containing missing values or values outside the scale range
## (`geom_line()`).
## Warning: Removed 1 row containing missing values or values outside the scale range
## (`geom_line()`).
First: establishing a measurement scale. The two major dimensions to consider are:

- cost-benefit analysis
  - how much does it cost when we are wrong?
  - how much do we win when we're right?
- accuracy in context
| Day | Current Weatherman | New Weatherman | Outcome |
|---|---|---|---|
| 1 | 1.0 | 0.0 | rain |
| 2 | 1.0 | 0.0 | rain |
| 3 | 1.0 | 0.0 | rain |
| 4 | 0.6 | 0.0 | sun |
| 5 | 0.6 | 0.0 | sun |
| 6 | 0.6 | 0.0 | sun |
| 7 | 0.6 | 0.0 | sun |
| 8 | 0.6 | 0.0 | sun |
| 9 | 0.6 | 0.0 | sun |
| 10 | 0.6 | 0.0 | sun |
If accuracy is the chance of a correct prediction:
$$\text{Current} = \frac{(3 \times 1) + (7 \times 0.4)}{10} = 0.58$$

$$\text{New} = \frac{(3 \times 0) + (7 \times 1)}{10} = 0.70$$
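As a sketch, the same arithmetic in R, where each entry is the probability that a forecaster calls the day correctly (rainy days first):

current <- c(rep(1, 3), rep(0.4, 7))  # P(correct): 1 on rain days, 1 - 0.6 on sun days
new     <- c(rep(0, 3), rep(1, 7))    # P(correct): 0 on rain days, 1 on sun days
mean(current)  # 0.58
mean(new)      # 0.70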
Finally, our previous measure of “accuracy” (we used a HIT RATE definition) is only one way to think about accuracy. What if accuracy is knowing the true data generating model? We might consider computing the probability of predicting the exact sequence of days (joint likelihood in Bayesian terms).
$$\text{Current} = 1^3 \times 0.4^7 \approx 0.002$$

$$\text{New} = 0^3 \times 1^7 = 0$$
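In R, the joint probability of the observed sequence is just the product of the probabilities each forecaster assigned to what actually happened, day by day (a sketch):

prod(c(rep(1, 3), rep(0.4, 7)))  # current: 1^3 * 0.4^7
## [1] 0.0016384
prod(c(rep(0, 3), rep(1, 7)))    # new: 0^3 * 1^7
## [1] 0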
How do we measure distance from perfect prediction? One important thing to keep in mind is that some targets are easier to hit than others. Therefore, the key to measuring distance is to ask, “How much is our uncertainty reduced by learning an outcome?” This reduction is formally referred to as INFORMATION.
We need to formalize our measure of uncertainty. This measurement should:

- be continuous;
- increase as the number of possible events increases;
- be additive across combinations of events.

This is satisfied by the INFORMATION ENTROPY FUNCTION. If there are $n$ different possible events and each event $i$ has probability $p_i$, and we call the list of probabilities $p$, then the unique measure of uncertainty we seek is:
$$H(p) = -\mathrm{E}[\log(p_i)] = -\sum_{i=1}^{n} p_i \log(p_i)$$
In words: The uncertainty contained in a probability distribution is the average log-probability of an event.
The log of a number between 0 and 1 is negative.
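A tiny helper makes the function concrete (a sketch; H is our own name, and we adopt the convention that 0 * log(0) = 0 so impossible events drop out):

H <- function(p) -sum(ifelse(p == 0, 0, p * log(p)))
H(c(.5, .5))  # two equally likely events: log(2) = 0.6931472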
Consider the weather forecast. When the day arrives, the weather is no longer uncertain. There were 3 rainy days and 7 sunny days. Therefore:
p <- c( .3, .7 )
-sum( p * log(p) )
## [1] 0.6108643
This is our uncertainty.
If we lived in Abu Dhabi, we might have a different probability of rain (let’s say .01), and therefore a different amount of uncertainty.
p <- c( .01, .99 )
-sum( p * log(p) )
## [1] 0.05600153
We define DIVERGENCE to be the additional uncertainty induced by using probabilities from one distribution to describe another distribution. This is known as the Kullback-Leibler divergence or simply KL divergence.
The divergence between a target (p) and a model (q) is defined as:
$$D_{KL}(p, q) = \sum_i p_i \left( \log(p_i) - \log(q_i) \right) = \sum_i p_i \log \left( \frac{p_i}{q_i} \right)$$
In plainer language, the divergence is the average difference in log probability between the target and the model: the difference between the cross entropy $H(p, q)$ and the target's own entropy $H(p)$.
Suppose, for example, the truth is that it rains 30% of the time. If our weatherman believes that it rains 25% of the time, how much additional uncertainty is introduced as a consequence of using the weatherman’s prediction to approximate the true weather?
$$D_{KL}(p, q) = \sum_i p_i \left( \log(p_i) - \log(q_i) \right) = \sum_i p_i \log \left( \frac{p_i}{q_i} \right)$$
p = c(.3, .7)
q = c(.25, .75)
sum(p*log(p/q))
## [1] 0.006401457
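Note that KL divergence is not symmetric; swapping the roles of target and model gives a different answer:

sum(q * log(q / p))  # q as target, p as model
## [1] 0.006164264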
What is the divergence when the model exactly matches the target? If $q = p$, then $\log(p_i / q_i) = \log(1) = 0$ for every event, so the divergence is 0.
At this point, you're probably thinking, "How can I put this into practice? Divergence is measuring distance from truth, but I don't know the truth." But we won't use divergence to estimate the distance from one model to the truth; we're only interested in using it to compare two models ($q$ and $r$) to each other. All we need to know is each model's average log probability: $\mathrm{E}[\log(q_i)]$ and $\mathrm{E}[\log(r_i)]$.
$$\begin{aligned}
D_{KL}(p, q) - D_{KL}(p, r) &= \sum_i p_i \left( \log(p_i) - \log(q_i) \right) - \sum_i p_i \left( \log(p_i) - \log(r_i) \right) \\
&= \sum_i p_i \log(p_i) - \sum_i p_i \log(q_i) - \sum_i p_i \log(p_i) + \sum_i p_i \log(r_i) \\
&= \sum_i p_i \log(r_i) - \sum_i p_i \log(q_i) \\
&= \mathrm{E}[\log(r_i)] - \mathrm{E}[\log(q_i)]
\end{aligned}$$
To do so, we simply sum over all the observations, i, yielding a total score for each model:
$$S(q) = \sum_i \log(q_i)$$
To compute this score for a Bayesian model, we need to find the log of the average probability for each observation i, where the average is taken over the posterior distribution. The score is known as the LOG-POINTWISE-PREDICTIVE-DENSITY.
Let’s see an example: First, we get the log-probabilities:
log_prob = log_lik(m1)
log_prob %>% round(3)
##          [,1]   [,2]   [,3]   [,4]   [,5]   [,6]   [,7]
##  [1,]  0.072  0.220  0.600  0.064 -0.071 -0.009  0.092
##  [2,]  0.165  0.147 -0.029  0.160 -0.063 -0.185 -0.143
##  [3,] -0.082 -0.050  0.019 -0.079 -0.092 -0.073 -0.243
##  [4,]  0.516  0.477  0.116  0.513  0.137 -0.066 -0.204
##  [5,]  0.261  0.241  0.042  0.262  0.269  0.266 -1.011
##  [6,]  0.681  0.708  0.593  0.704  0.719  0.693 -2.195
##  [7,]  0.144  0.255  0.449  0.056 -0.391 -0.495  0.272
##  [8,] -0.018  0.055  0.188 -0.077 -0.372 -0.444  0.112
##  [9,] -0.154 -0.139 -0.069 -0.114 -0.064 -0.072 -0.592
## [10,]  0.368  0.404  0.372  0.348  0.221  0.210 -0.283
## ... (remaining 990 rows of the 1000 x 7 matrix omitted)
## [800,] 0.502 0.510 0.278 0.431 -0.330 -0.707 0.204
## [801,] 0.526 0.504 0.207 0.514 0.480 0.423 -2.175
## [802,] 0.395 0.403 0.253 0.376 0.196 0.139 -0.280
## [803,] 0.344 0.175 -0.573 0.481 0.378 0.176 -0.698
## [804,] 0.327 0.403 0.557 0.352 0.370 0.428 -0.465
## [805,] 0.192 0.256 0.406 0.211 0.222 0.271 -0.253
## [806,] 0.572 0.492 -0.068 0.602 0.627 0.617 -1.635
## [807,] -0.686 -0.354 0.781 -0.486 -0.158 0.174 -0.553
## [808,] -0.231 -0.322 -0.680 -0.150 -0.001 0.003 -0.889
## [809,] -0.040 -0.037 -0.087 -0.041 -0.047 -0.044 -0.531
## [810,] 0.212 0.237 0.369 0.304 0.358 0.265 -1.241
## [811,] 0.448 0.475 0.422 0.453 0.447 0.466 -0.825
## [812,] -0.075 -0.050 0.073 -0.013 0.082 0.091 -0.510
## [813,] -0.080 -0.058 0.177 0.077 0.265 0.216 -0.775
## [814,] -0.067 0.068 0.545 0.042 0.239 0.385 -0.210
## [815,] 0.128 0.130 0.043 0.127 0.105 0.107 -0.509
## [816,] 0.327 0.355 0.411 0.376 0.416 0.382 -1.173
## [817,] 0.261 0.285 0.379 0.333 0.361 0.273 -1.297
## [818,] 0.463 0.597 0.698 0.318 -0.496 -0.745 0.275
## [819,] 0.585 0.710 1.056 0.798 1.055 1.007 -4.068
## [820,] 0.592 0.580 0.279 0.576 0.322 0.224 -0.478
## [821,] 0.732 0.677 0.143 0.739 0.457 0.320 -0.885
## [822,] 0.053 0.049 -0.036 0.049 0.024 -0.011 -1.009
## [823,] -0.173 -0.188 -0.298 -0.168 -0.161 -0.162 -0.647
## [824,] 0.024 -0.067 -0.547 0.052 0.166 0.169 -1.873
## [825,] 0.137 0.148 0.041 0.066 -0.642 -1.023 0.124
## [826,] -0.083 -0.079 -0.155 -0.124 -0.565 -0.807 -0.097
## [827,] -0.446 -0.440 -0.449 -0.453 -0.497 -0.508 -0.532
## [828,] -0.457 -0.452 -0.458 -0.457 -0.464 -0.461 -0.618
## [829,] 0.118 0.030 -0.370 0.189 0.192 0.124 -0.571
## [830,] 0.376 0.472 0.595 0.329 0.063 0.039 -0.051
## [831,] 1.020 1.063 0.601 0.974 0.591 0.527 -2.059
## [832,] 0.151 0.168 0.247 0.212 0.225 0.140 -1.098
## [833,] 1.177 1.199 0.473 1.140 0.782 0.737 -3.482
## [834,] 1.177 1.199 0.473 1.140 0.782 0.737 -3.482
## [835,] 0.048 0.120 0.305 0.052 0.013 0.052 -0.029
## [836,] 0.074 0.142 0.305 0.071 0.011 0.041 -0.037
## [837,] 0.299 0.333 0.513 0.425 0.499 0.377 -1.634
## [838,] 0.561 0.633 0.643 0.543 0.413 0.432 -0.625
## [839,] 0.460 0.430 0.123 0.458 0.467 0.456 -1.607
## [840,] 0.701 0.717 0.371 0.580 -0.696 -1.337 0.325
## [841,] 0.701 0.717 0.371 0.580 -0.696 -1.337 0.325
## [842,] 0.663 0.642 0.294 0.663 0.633 0.635 -1.465
## [843,] 0.554 0.729 1.005 0.584 0.544 0.660 -1.562
## [844,] 0.480 0.473 0.256 0.476 0.419 0.413 -0.774
## [845,] 0.341 0.364 0.332 0.347 0.346 0.363 -0.681
## [846,] -0.125 -0.030 0.471 0.100 0.505 0.614 -0.687
## [847,] 0.123 0.196 0.404 0.161 0.217 0.282 -0.224
## [848,] 0.349 0.430 0.549 0.333 0.209 0.231 -0.205
## [849,] 0.818 0.791 0.326 0.819 0.812 0.817 -2.457
## [850,] 0.424 0.709 1.132 0.327 -0.270 -0.261 -0.945
## [851,] 0.433 0.507 0.479 0.317 -0.435 -0.727 0.282
## [852,] 0.731 0.759 0.606 0.750 0.766 0.755 -2.298
## [853,] 0.984 0.870 -0.109 1.005 0.405 0.086 -1.541
## [854,] 0.837 0.786 0.193 0.840 0.854 0.852 -3.003
## [855,] 0.456 0.536 0.559 0.382 -0.042 -0.149 0.029
## [856,] 0.814 0.873 0.648 0.756 0.350 0.264 -0.879
## [857,] 0.484 0.542 0.578 0.492 0.474 0.513 -0.736
## [858,] 0.826 0.821 0.367 0.769 0.078 -0.220 -0.430
## [859,] 0.360 0.376 0.573 0.527 0.376 -0.037 -2.785
## [860,] -0.030 0.009 0.171 0.033 0.147 0.186 -0.377
## [861,] -0.560 -0.559 -0.568 -0.558 -0.565 -0.577 -0.853
## [862,] 0.532 0.513 0.227 0.507 0.391 0.261 -2.663
## [863,] 0.236 0.230 0.101 0.234 0.229 0.214 -1.041
## [864,] 0.263 0.208 -0.116 0.293 0.208 0.136 -0.458
## [865,] 0.702 0.543 -0.351 0.788 0.281 -0.100 -0.717
## [866,] 0.535 0.605 0.503 0.389 -0.661 -1.118 0.369
## [867,] 0.188 0.161 -0.049 0.185 -0.143 -0.338 -0.054
## [868,] 0.825 0.872 0.609 0.776 0.414 0.339 -0.979
## [869,] 0.689 0.664 0.277 0.690 0.608 0.591 -1.277
## [870,] 0.429 0.547 0.815 0.500 0.608 0.707 -1.151
## [871,] 0.518 0.624 0.693 0.445 0.031 -0.042 -0.124
## [872,] 0.510 0.602 0.715 0.520 0.480 0.537 -0.770
## [873,] 0.618 0.625 0.373 0.591 0.309 0.216 -0.455
## [874,] 1.190 1.033 -0.403 1.196 0.005 -0.627 -2.113
## [875,] -0.601 -0.593 -0.469 -0.517 -0.368 -0.344 -0.498
## [876,] -0.274 -0.320 -0.497 -0.235 -0.392 -0.541 -0.335
## [877,] 0.268 0.383 0.540 0.162 -0.396 -0.546 0.286
## [878,] 0.069 0.088 0.102 0.080 0.096 0.108 -0.514
## [879,] 0.494 0.406 -0.165 0.526 0.594 0.595 -1.864
## [880,] -0.400 -0.402 -0.465 -0.444 -1.021 -1.380 -0.421
## [881,] 0.701 0.753 0.628 0.683 0.547 0.557 -1.032
## [882,] 0.699 0.631 0.069 0.720 0.567 0.486 -1.162
## [883,] 0.326 0.490 0.887 0.393 0.467 0.601 -0.964
## [884,] -0.015 -0.001 -0.034 -0.031 -0.141 -0.172 -0.229
## [885,] -0.034 -0.021 -0.066 -0.062 -0.279 -0.370 -0.141
## [886,] 0.277 0.156 -0.431 0.361 0.442 0.408 -1.118
## [887,] -1.743 -1.427 0.274 -1.051 0.306 0.805 -1.338
## [888,] 0.154 0.037 -0.446 0.256 0.115 -0.083 -0.309
## [889,] 0.072 0.065 0.179 0.179 -0.090 -0.506 -1.895
## [890,] -0.132 -0.133 -0.145 -0.150 -0.559 -0.921 -1.840
## [891,] -0.707 -0.719 -0.804 -0.719 -0.750 -0.779 -1.234
## [892,] -0.451 -0.496 -0.751 -0.464 -0.474 -0.513 -1.705
## [893,] 0.416 0.436 0.271 0.328 -0.487 -0.890 0.267
## [894,] 0.583 0.597 0.557 0.618 0.376 0.045 -3.217
## [895,] -0.199 -0.299 -0.620 -0.095 -0.379 -0.696 -0.175
## [896,] -0.350 -0.503 -0.919 -0.159 -0.397 -0.791 -0.195
## [897,] -0.123 -0.236 -0.635 -0.005 -0.019 -0.157 -0.386
## [898,] 0.119 0.065 -0.235 0.150 0.187 0.179 -0.785
## [899,] -0.314 -0.355 -0.512 -0.286 -0.558 -0.778 -0.317
## [900,] 0.548 0.571 0.669 0.658 0.444 0.039 -3.419
## [901,] -0.169 -0.125 -0.022 -0.184 -0.270 -0.270 -0.117
## [902,] 0.068 0.155 0.359 0.052 -0.069 -0.048 0.069
## [903,] 0.245 0.419 0.695 0.090 -0.705 -0.912 0.409
## [904,] 0.489 0.564 0.670 0.522 0.560 0.616 -0.960
## [905,] 0.579 0.635 0.714 0.653 0.735 0.713 -2.032
## [906,] 0.329 0.357 0.304 0.306 0.151 0.121 -0.214
## [907,] -0.010 -0.300 -1.424 0.265 0.592 0.506 -1.662
## [908,] 0.277 0.132 -0.559 0.380 0.507 0.478 -1.364
## [909,] 0.450 0.546 0.619 0.381 -0.004 -0.078 -0.018
## [910,] 0.817 0.829 0.515 0.814 0.765 0.781 -1.936
## [911,] 0.525 0.622 0.762 0.553 0.564 0.635 -1.012
## [912,] 0.418 0.531 0.677 0.366 0.069 0.048 -0.106
## [913,] 1.174 1.185 0.469 1.161 1.006 1.023 -4.244
## [914,] 0.757 0.809 0.601 0.700 0.288 0.185 -0.629
## [915,] 0.493 0.531 0.435 0.450 0.162 0.084 -0.186
## [916,] 0.544 0.562 0.474 0.562 0.565 0.531 -1.718
## [917,] 0.481 0.481 0.332 0.478 0.398 0.289 -2.071
## [918,] 0.352 0.375 0.383 0.384 0.404 0.372 -1.197
## [919,] 0.852 0.867 0.468 0.796 0.252 0.060 -0.728
## [920,] 0.490 0.599 0.769 0.505 0.469 0.539 -0.788
## [921,] 0.345 0.465 0.710 0.353 0.289 0.357 -0.379
## [922,] -0.010 0.032 0.067 -0.063 -0.359 -0.461 -0.004
## [923,] 0.147 0.228 0.366 0.093 -0.187 -0.238 0.134
## [924,] 0.388 0.401 0.350 0.405 0.390 0.333 -1.435
## [925,] 0.469 0.588 0.719 0.407 0.052 0.014 -0.142
## [926,] 0.605 0.704 0.741 0.567 0.326 0.326 -0.593
## [927,] 0.525 0.519 0.272 0.509 0.287 0.206 -0.404
## [928,] 0.601 0.616 0.401 0.566 0.262 0.165 -0.369
## [929,] 0.131 0.154 0.361 0.280 0.370 0.230 -1.333
## [930,] 0.339 0.416 0.423 0.210 -0.599 -0.928 0.332
## [931,] 0.617 0.700 0.682 0.574 0.306 0.283 -0.518
## [932,] 0.556 0.567 0.422 0.562 0.567 0.566 -1.453
## [933,] 0.432 0.465 0.483 0.466 0.507 0.505 -1.186
## [934,] 0.916 0.993 0.666 0.802 -0.045 -0.318 -0.726
## [935,] -0.261 -0.256 -0.250 -0.250 -0.256 -0.285 -0.775
## [936,] 0.066 0.082 0.068 0.067 0.055 0.064 -0.426
## [937,] 0.957 0.915 0.253 0.955 0.762 0.711 -2.182
## [938,] 0.354 0.326 0.072 0.359 0.362 0.362 -0.966
## [939,] 0.298 0.233 -0.145 0.334 0.332 0.302 -0.782
## [940,] 0.516 0.337 -0.554 0.642 0.560 0.395 -1.219
## [941,] 0.696 0.536 -0.520 0.742 0.880 0.882 -4.016
## [942,] 0.099 0.092 -0.022 0.100 0.090 0.091 -0.593
## [943,] 0.019 0.010 -0.102 0.019 -0.017 -0.030 -0.441
## [944,] 0.039 -0.127 -0.738 0.206 0.186 -0.008 -0.455
## [945,] 0.456 0.338 -0.251 0.525 -0.015 -0.427 -0.077
## [946,] 0.343 0.300 -0.006 0.348 -0.030 -0.262 -0.056
## [947,] 0.172 0.106 -0.224 0.217 0.095 -0.021 -0.325
## [948,] 0.407 0.435 0.342 0.369 0.098 0.015 -0.118
## [949,] 0.346 0.451 0.956 0.660 0.976 0.820 -3.745
## [950,] 0.829 0.881 0.629 0.782 0.432 0.366 -1.037
## [951,] 0.114 0.244 0.676 0.227 0.430 0.563 -0.586
## [952,] 0.566 0.440 -0.231 0.630 0.003 -0.450 -0.142
## [953,] 0.662 0.700 0.523 0.618 0.295 0.213 -0.474
## [954,] 0.719 0.755 0.560 0.694 0.507 0.489 -0.938
## [955,] 0.685 0.663 0.273 0.670 0.361 0.233 -0.610
## [956,] 0.473 0.574 0.794 0.550 0.673 0.751 -1.349
## [957,] 0.740 0.676 0.100 0.758 0.686 0.653 -1.631
## [958,] 0.243 0.396 0.799 0.309 0.389 0.517 -0.651
## [959,] 0.340 0.315 0.071 0.345 0.345 0.344 -0.910
## [960,] 0.463 0.473 0.268 0.396 -0.295 -0.632 0.183
## [961,] 0.610 0.667 0.566 0.554 0.190 0.101 -0.293
## [962,] 0.442 0.526 0.699 0.498 0.586 0.652 -1.027
## [963,] 0.303 0.276 0.036 0.308 0.317 0.317 -0.965
## [964,] -0.149 -0.112 -0.016 -0.147 -0.168 -0.147 -0.189
## [965,] 0.691 0.527 -0.350 0.763 -0.283 -1.020 -0.042
## [966,] 0.243 0.207 -0.048 0.248 -0.086 -0.293 -0.060
## [967,] 0.515 0.342 -0.476 0.637 0.152 -0.276 -0.352
## [968,] 0.610 0.723 0.895 0.677 0.775 0.858 -1.898
## [969,] 0.707 0.817 0.762 0.610 0.015 -0.125 -0.382
## [970,] 0.533 0.709 0.814 0.316 -0.936 -1.359 0.369
## [971,] 0.793 0.861 0.677 0.738 0.363 0.296 -0.878
## [972,] 0.689 0.747 0.689 0.701 0.691 0.731 -1.499
## [973,] 0.691 0.736 0.565 0.649 0.350 0.288 -0.603
## [974,] 0.690 0.741 0.599 0.654 0.409 0.377 -0.717
## [975,] 0.415 0.494 0.671 0.476 0.574 0.636 -0.996
## [976,] 1.117 1.103 0.370 1.102 0.889 0.868 -3.324
## [977,] 0.451 0.553 0.752 0.493 0.541 0.622 -0.915
## [978,] 0.801 0.644 -0.330 0.877 0.440 0.131 -1.199
## [979,] -0.059 0.123 0.715 0.053 0.236 0.414 -0.384
## [980,] 0.422 0.308 -0.309 0.493 0.524 0.479 -1.212
## [981,] -0.276 -0.233 0.172 -0.029 0.368 0.405 -0.556
## [982,] 0.458 0.261 -0.595 0.609 0.051 -0.466 -0.224
## [983,] -0.296 -0.213 0.176 -0.169 0.088 0.213 -0.037
## [984,] 0.212 0.344 0.576 0.120 -0.349 -0.441 0.270
## [985,] 0.730 0.643 -0.006 0.748 0.163 -0.184 -0.437
## [986,] 0.355 0.131 -0.756 0.549 -0.038 -0.635 -0.114
## [987,] 0.161 0.180 0.275 0.233 0.250 0.153 -1.158
## [988,] 0.566 0.502 0.033 0.578 0.118 -0.158 -0.211
## [989,] 0.298 0.341 0.519 0.406 0.523 0.477 -1.325
## [990,] 0.250 0.294 0.395 0.290 0.355 0.386 -0.628
## [991,] -0.230 -0.079 0.480 -0.110 0.108 0.277 -0.045
## [992,] 0.864 0.871 0.475 0.844 0.638 0.604 -1.509
## [993,] 0.555 0.551 0.311 0.547 0.447 0.428 -0.762
## [994,] 0.123 0.255 0.636 0.179 0.245 0.357 -0.254
## [995,] 0.608 0.655 0.641 0.641 0.685 0.701 -1.632
## [996,] 1.000 0.946 0.177 0.979 0.399 0.150 -1.457
## [997,] 0.682 0.739 0.695 0.703 0.720 0.758 -1.640
## [998,] 0.682 0.739 0.695 0.703 0.720 0.758 -1.640
## [999,] 0.682 0.739 0.695 0.703 0.720 0.758 -1.640
## [1000,] 0.444 0.474 0.539 0.512 0.527 0.427 -1.810
Let’s arrange these by species and convert to probabilities.
prob <- as.data.frame(log_prob) %>%
set_names(pull(d, species)) %>% # add case names, for convenience
mutate(s = 1:n()) %>% # add an s iteration index, for convenience
# make it long
pivot_longer(-s,
names_to = "species",
values_to = "logprob") %>%
# compute the probability scores
mutate(prob = exp(logprob))
prob
## # A tibble: 7,000 × 4
## s species logprob prob
## <int> <chr> <dbl> <dbl>
## 1 1 afarensis 0.0716 1.07
## 2 1 africanus 0.220 1.25
## 3 1 habilis 0.600 1.82
## 4 1 boisei 0.0638 1.07
## 5 1 rudolfensis -0.0708 0.932
## 6 1 ergaster -0.00892 0.991
## 7 1 sapiens 0.0916 1.10
## 8 2 afarensis 0.165 1.18
## 9 2 africanus 0.147 1.16
## 10 2 habilis -0.0289 0.972
## # ℹ 6,990 more rows
We can then calculate the mean probability for each observation across posterior draws and take the log of that value. (Annoying.)
prob <- prob %>%
group_by(species) %>%
summarise(log_probability_score = mean(prob) %>% log())
prob
## # A tibble: 7 × 2
## species log_probability_score
## <chr> <dbl>
## 1 afarensis 0.431
## 2 africanus 0.456
## 3 boisei 0.444
## 4 ergaster 0.254
## 5 habilis 0.360
## 6 rudolfensis 0.297
## 7 sapiens -0.618
The log-pointwise-predictive-density for the entire model is just the sum of these:
prob %>%
summarise(total_log_probability_score = sum(log_probability_score))
## # A tibble: 1 × 1
## total_log_probability_score
## <dbl>
## 1 1.62
Sometimes, you’ll see people report deviance, which is just this value multiplied by -2 (for historical reasons).
-2*sum(prob$log_probability_score)
## [1] -3.248558
Here’s the work in a custom function for your convenience.
lppd <- function(brms_fit) {
  log_lik(brms_fit) %>%              # draws x observations matrix of log-likelihoods
    data.frame() %>%
    pivot_longer(everything(),       # make it long: one row per draw x observation
                 values_to = "logprob") %>%
    mutate(prob = exp(logprob)) %>%  # back to the probability scale
    group_by(name) %>%               # one group per observation
    summarise(log_probability_score = mean(prob) %>% log()) %>%
    summarise(total_log_probability_score = sum(log_probability_score))
}
One issue with the log-probability score is that it always improves as the model gets more complex. One way to address this is by calculating the log-probability out-of-sample.
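To see this in sample, we can apply the helper above to the two models from earlier; the wildly flexible polynomial m2 should earn a higher (better) in-sample score than m1 on the very data it was fit to.
lppd(m1)  # simple linear model
lppd(m2)  # 5th-degree polynomial; expect a higher in-sample score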
Usually, when we have data and use it to fit a statistical model, the data comprise a TRAINING SAMPLE. Parameters are estimated from it, and then we can imagine using those estimates to predict outcomes in a new sample, called the TEST SAMPLE.
Using out-of-sample prediction doesn’t change the model; rather, it changes our estimate of the model’s predictive accuracy.
One strategy for estimating predictive accuracy is to actually test the model’s predictions on another sample. This is known as CROSS-VALIDATION: leave out a small chunk of observations from the sample, fit the model to the rest, and evaluate it on the observations that were left out.
We’re not actually going to leave out data (imagine!), so instead we’ll divide the data into chunks, or “folds”. The model will then predict each fold after being trained on all the other folds. This is known as K-FOLD CROSS-VALIDATION.
# Create data for k-fold cross validation visualization
k <- 5 # Number of folds
blocks <- 5 # Number of data blocks per fold
fold_data <- data.frame()
# Generate data for each fold
for (i in 1:k) {
# For each fold, create a row with blocks indicating training or test
fold_row <- data.frame(
fold = paste("Fold", i),
block = 1:blocks,
type = rep("Training", blocks)
)
# Set one block as test data (different block for each fold)
fold_row$type[i] <- "Test"
# Add to the overall data
fold_data <- rbind(fold_data, fold_row)
}
# Convert fold to factor to preserve order
fold_data$fold <- factor(fold_data$fold, levels = unique(fold_data$fold))
# Create the visualization
k_fold_plot <- ggplot(fold_data, aes(x = block, y = fold, fill = type)) +
geom_tile(color = "white", linewidth = 0.5) +
scale_fill_manual(values = c("Training" = "#1c5253", "Test" = "#e07a5f")) +
scale_y_discrete(breaks = NULL) +
labs(
#title = "K-Fold Cross Validation (k=5)",
fill = "Data Usage",
x = "Data Blocks",
y = ""
) +
facet_wrap(~ fold, nrow = 5, scales = "free") +
theme_minimal(base_size = 14) +
theme(
plot.title = element_text(hjust = 0.5, face = "bold"),
legend.position = "bottom",
panel.grid = element_blank(),
axis.text.x = element_blank(),
axis.ticks = element_blank()
)
# Display the plot
print(k_fold_plot)
The minimum number of folds is 2, and the maximum is your sample size, with each fold containing a single observation. The latter is referred to as LEAVE-ONE-OUT CROSS-VALIDATION (LOOCV), and it is extremely common.
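brms will run this for us with its kfold() method, which refits the model once per fold. Here’s a minimal sketch using the m1 fit from earlier; with only 7 observations, setting K to the sample size makes this exact leave-one-out:
kf_m1 <- kfold(m1, K = 7)  # refit m1 once per held-out fold
kf_m1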
The problem with LOOCV is that it’s computationally intensive. Luckily, there are some clever maths we can use to approximate the score we would get from refitting the model over and over. One approach is to use the importance (or weight) of each observation: that is, how much does the posterior distribution change if we remove this observation from our data? (Similar to influence and leverage.) Importantly, observations that are less likely are more important.
We can use importance in a strategy called PARETO-SMOOTHED IMPORTANCE SAMPLING CROSS-VALIDATION (PSIS; see Vehtari, Gelman, & Gabry, 2017). The Pareto part is a smoothing technique that improves the reliability of the importance weights: by assuming the weights follow a known distribution, the Pareto distribution, we can estimate a reliable cross-validation score without doing the work of actually cross-validating. We get a PSIS score for each observation in our dataset, as well as a standard error for the score of the entire model.
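In brms this is a one-liner; a minimal sketch with the m1 fit from above:
loo_m1 <- loo(m1)  # PSIS-LOO without any refitting
loo_m1             # pointwise scores plus a standard error for the model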
A second approach is to use information criteria to compute the expected score out-of-sample. If you look back at the training/testing figure, you’ll find that the difference between training deviance and testing deviance is almost exactly twice the number of parameters in the model (e.g., 2 for the first model with 1 parameter and about 10 for the last with 5 parameters). This is not a coincidence but a well-known finding in machine learning, and we can exploit it for simple estimates of out-of-sample deviance.
A well-known estimate is the AKAIKE INFORMATION CRITERION (AIC):
$$\text{AIC} = D_{\text{train}} + 2p = -2\,\text{lppd} + 2p$$
where $D_{\text{train}}$ is the in-sample deviance and $p$ is the number of free parameters in the posterior distribution. As the 2 is just there for scaling, what AIC tells us is that the dimensionality of the posterior distribution is a natural measure of the model’s overfitting tendency: more complex models tend to overfit more, directly in proportion to the number of parameters.
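As a quick, hypothetical sketch of the formula, using the lppd() helper above and m1’s three free parameters (intercept, slope, sigma):
p <- 3                        # free parameters in m1: intercept, slope, sigma
-2 * pull(lppd(m1)) + 2 * p   # AIC = -2*lppd + 2p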
AIC isn’t commonly used now. Its approximation is only reliable when:
- the priors are flat or overwhelmed by the likelihood,
- the posterior distribution is approximately multivariate Gaussian, and
- the sample size is much greater than the number of parameters.
Similarly, the DEVIANCE INFORMATION CRITERION (DIC) doesn’t assume flat priors, but it does make the other two assumptions.
Watanabe’s WIDELY APPLICABLE INFORMATION CRITERION (WAIC) makes no assumption about the shape of the posterior. Its goal is to guess the out-of-sample KL divergence. In a large sample, the approximation converges to the cross-validation approximation, but in finite samples, there may be some disagreement.
Its calculation is the log-pointwise-predictive-density minus a penalty proportional to the variance in the posterior predictions, all multiplied by −2 to put it on the deviance scale:
$$\text{WAIC}(y, \Theta) = -2\left(\text{lppd} - \sum_i \operatorname{var}_\theta \log p(y_i \mid \Theta)\right)$$
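brms computes WAIC directly; for the fits from earlier:
waic(m1)  # the penalty term is reported as p_waic
waic(m2)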
PSIS and WAIC perform very similarly in the context of ordinary linear models. They may diverge, though, when our posterior distributions move away from Gaussian or when there are highly influential observations.
PSIS has higher variance as an estimator of the KL divergence, while WAIC has greater bias, so we should expect each to be slightly better in different contexts. In practice, however, any advantage may be much smaller than the expected error. Watanabe recommends computing both WAIC and PSIS and contrasting them; if there are large differences, one or both criteria are likely unreliable.
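A sketch of that contrast, plus a model comparison on the PSIS-LOO scale (loo_compare() reports differences in expected log predictive density):
loo(m2)   # PSIS estimate for the polynomial model
waic(m2)  # WAIC estimate for the same model; large disagreement is a warning sign
loo_compare(loo(m1), loo(m2))  # compare the two models on PSIS-LOO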
PSIS has a distinct advantage in warning the user when it is unreliable. The Pareto k values that PSIS computes for each observation indicate when the PSIS score may be unreliable, and they also identify which observations are at fault. We’ll see later how useful this can be.
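For instance, a small sketch (the diagnostics slot comes from the loo package, which brms uses under the hood):
loo_m2 <- loo(m2)
loo_m2$diagnostics$pareto_k  # one k value per observation
plot(loo_m2)                 # quick visual check of the k values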
The BAYESIAN INFORMATION CRITERION (BIC) is frequently compared with AIC. It’s important to understand that choosing between these criteria isn’t fundamentally about adopting a Bayesian perspective: both can be derived through either Bayesian or non-Bayesian approaches, and strictly speaking, neither is purely Bayesian.
BIC is mathematically connected to the logarithm of a linear model’s average likelihood. In Bayesian statistics, this average likelihood serves as the denominator in Bayes’ theorem: the likelihood averaged over the prior distribution. Comparing average likelihoods has long been a standard method for model comparison in Bayesian analysis; when expressed as ratios, these comparisons yield what we call BAYES FACTORS. On the logarithmic scale, the ratios become differences, making them conceptually similar to comparing differences in information criteria.
Since the average likelihood incorporates the prior distribution, models with more parameters naturally incur a complexity penalty. This helps prevent overfitting, although the exact penalty mechanism differs from that of information criteria. Many Bayesian statisticians have reservations about Bayes factors, and all acknowledge certain technical challenges. One significant obstacle is computational: calculating the average likelihood is often difficult, and even when the posterior distribution can be computed successfully, the average likelihood may remain out of reach. Another issue is that while weak priors might minimally impact posterior distributions within individual models, they can dramatically influence comparisons between different models.
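If you do want Bayes factors, brms wraps the bridgesampling package. A hedged sketch: this requires refitting with save_pars = save_pars(all = TRUE), which the fits above did not use, so treat these refits as hypothetical.
m1_bf <- update(m1, save_pars = save_pars(all = TRUE))  # hypothetical refit
m2_bf <- update(m2, save_pars = save_pars(all = TRUE))  # hypothetical refit
bayes_factor(m1_bf, m2_bf)  # evidence for m1 relative to m2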
Regularizing priors (priors which are skeptical of extreme parameter values) reduce fit to sample but tend to improve predictive accuracy.
How do we choose between several plausible models when seeking to maximize accuracy?
We can estimate out-of-sample accuracy with any of a number of techniques, but most popularly:
- cross-validation (including LOOCV and its PSIS approximation), and
- information criteria (AIC, DIC, WAIC).
Regularization and predictive criteria are complementary.
E is mathematical notation for “expected value”, or average.